def __init__(self, word, meaning=None, synonyms=None, examples=None, extra=None):
    """Hold a dictionary entry: the word itself plus its scraped data.

    Args:
        word: The looked-up word; normalized to lowercase with surrounding
            whitespace stripped.
        meaning: Scraped meaning text, or None if not available.
        synonyms: List of synonyms (defaults to a fresh empty list).
        examples: List of usage examples (defaults to a fresh empty list).
        extra: Dict of additional scraped fields (defaults to a fresh
            empty dict).
    """
    # NOTE: defaults are None sentinels, not [] / {} — mutable defaults are
    # shared across every call, so one instance mutating its synonyms list
    # would silently leak into all later instances.
    self.word = word.strip().lower()
    # URL is built from the accent-stripped form of the normalized word.
    self.url = BASE_URL.format(Utils.remove_accents(self.word))
    self.meaning = meaning
    self.synonyms = [] if synonyms is None else synonyms
    self.extra = {} if extra is None else extra
    self.examples = [] if examples is None else examples
def search(self, word):
    """Search for *word* and return a scraped ``Word``, or ``None``.

    Returns None for multi-word queries and for any fetch/decode failure.

    Args:
        word: Single word to look up (accents are stripped for the URL;
            the original spelling is kept on the returned Word).

    Returns:
        A ``Word`` populated from the fetched page, or ``None`` on failure.
    """
    # Only single-word lookups are supported.
    if len(word.split()) > 1:
        return None
    _word = Utils.remove_accents(word).strip().lower()
    try:
        with self.get(BASE_URL.format(_word)) as request:
            page = html.unescape(request.read().decode(CHARSET))
    except Exception:
        # Narrowed from a bare `except:` — a bare clause would also swallow
        # KeyboardInterrupt/SystemExit. Any fetch/decode error means
        # "not found" per this method's contract.
        return None
    return Word(
        word,
        meaning=self.scrape_meaning(page),
        synonyms=self.scrape_synonyms(page),
        examples=self.scrape_examples(page),
        extra=self.scrape_extra(page),
    )
def search(self, word):
    """Search for *word* and return a scraped ``Word``, or ``None``.

    Returns None for multi-word queries, for any fetch/decode failure,
    and when the page contains the "no results" marker.

    Args:
        word: Single word to look up (accents are stripped for the URL).

    Returns:
        A ``Word`` populated from the fetched page, or ``None``.
    """
    # Only single-word lookups are supported.
    if len(word.split()) > 1:
        return None
    _word = Utils.remove_accents(word).strip().lower()
    try:
        # `with` ensures the HTTP response is closed — the original leaked
        # the connection. Reading/decoding inside the try keeps the
        # "return None on failure" contract for decode errors too.
        with request.urlopen(BASE_URL.format(_word)) as response:
            page = html.unescape(response.read().decode(CHARSET))
    except Exception:
        # Narrowed from a bare `except:` (which would also catch
        # KeyboardInterrupt/SystemExit).
        return None
    # TAG_ENCHANT[0] marks a "did you mean ...?" / no-result page.
    if page.find(TAG_ENCHANT[0]) > -1:
        return None
    found = Word(word)
    found.meaning = self.meaning(page)
    found.synonyms = self.synonyms(page)
    found.extra = self.extra(page)
    return found
def search(self, word):
    """Search for *word* and return a scraped ``Word``, or ``None``.

    The returned Word's spelling is taken from the page's <h1> heading
    (the canonical form), not from the raw query.

    Args:
        word: Single word to look up (accents are stripped for the URL).

    Returns:
        A ``Word`` populated from the fetched page, or ``None`` on failure.
    """
    # Only single-word lookups are supported.
    if len(word.split()) > 1:
        return None
    _word = Utils.remove_accents(word).strip().lower()
    try:
        with self.get(BASE_URL.format(_word)) as request:
            page = html.unescape(request.read().decode(CHARSET))
    except Exception:
        # Narrowed from a bare `except:` — any fetch/decode error means
        # "not found" per this method's contract.
        return None
    # scrape_meaning returns a (meaning, etymology) pair here.
    meaning, etymology = self.scrape_meaning(page)
    return Word(
        # Use the page's <h1> as the canonical spelling of the entry.
        Utils.text_between(page, "<h1", "</h1>", force_html=True).lower(),
        meaning=meaning,
        etymology=etymology,
        synonyms=self.scrape_synonyms(page),
        examples=self.scrape_examples(page),
        extra=self.scrape_extra(page),
    )
def __init__(self, word, meaning=None, synonyms=None, extra=None):
    """Hold a dictionary entry: the word itself plus its scraped data.

    Args:
        word: The looked-up word; normalized to lowercase with surrounding
            whitespace stripped.
        meaning: Scraped meaning text, or None if not available.
        synonyms: List of synonyms (defaults to a fresh empty list).
        extra: Dict of additional scraped fields (defaults to a fresh
            empty dict).
    """
    # NOTE: defaults are None sentinels, not [] / {} — mutable defaults are
    # shared across every call, so mutating one instance's list/dict would
    # silently affect all later instances.
    self.word = word.strip().lower()
    # URL is built from the accent-stripped, normalized form of the word.
    self.url = BASE_URL.format(Utils.remove_accents(word).strip().lower())
    self.meaning = meaning
    self.synonyms = [] if synonyms is None else synonyms
    self.extra = {} if extra is None else extra