def getWikipediaAnswer(inp):
    # Query Wikipedia for input of any kind (Russian or English).
    if utils.isEnglish(inp) > 0.8:
        res = strongWikiEN(inp)
    else:
        res = strongWiki(inp)
        if len(res) < 1:
            # Nothing on the Russian wiki: translate the query and try the English one.
            res = strongWikiEN(utils.rutoen(inp))
            if len(res) > 5:
                # Translate the English answer back to Russian.
                res = utils.entoru(res)
    return res
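# A minimal usage sketch (not from the original source), assuming the project's
# utils module and the strongWiki/strongWikiEN helpers are importable here.
# Russian queries that the Russian wiki cannot answer fall back to the English
# wiki with a translated query, and the answer is translated back to Russian.
print(getWikipediaAnswer("квантовая запутанность"))   # Russian query
print(getWikipediaAnswer("quantum entanglement"))     # English query, answered as-is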
def displayName(self):
    # Build the Kodi label: "<series> [- <description>] (Episode N, Season M)".
    display_list = ['[COLOR white][B]%s[/B][/COLOR]' % self.seriesName]
    if self.description:
        # self.description is expected to be UTF-8 encoded bytes.
        if isEnglish(self.description.decode('utf-8')):
            display_list.append('-')
        else:
            display_list.append('I')
        display_list.append(self.description)
    display_list.append('{start}Episode {no}{end}'.format(
        start='(' if self.description else '',
        no=self.episodeNumber,
        end=',' if self.description else ''))
    display_list.append('{start}Season {no})'.format(
        start='' if self.description else '(',
        no=self.seasonNumber))
    return ' '.join(display_list)
import urllib.parse
import urllib.request

def getMedicalArticle(q):
    # To be reimplemented in the class below.
    # E-utilities documentation: https://www.ncbi.nlm.nih.gov/books/NBK25498/#chapter3.Introduction
    if utils.isEnglish(q) < 0.8:
        q = utils.rutoen(q)
    # urlencode([("", q)]) yields "=<escaped q>", which completes the "&term=" parameter.
    url = ("https://eutils.ncbi.nlm.nih.gov/entrez/eutils/esearch.fcgi?db=pubmed&term"
           + urllib.parse.urlencode([("", q)]) + "&usehistory=y")
    logD("Original query: " + q)
    bts = urllib.request.urlopen(url, timeout=40)
    s = bts.read().decode('UTF-8')
    bts.close()
    # First PubMed Id from the ESearch result -> article URL.
    res = "https://www.ncbi.nlm.nih.gov/pubmed/" + getXMLfield(s, "Id")
    bts = urllib.request.urlopen(res, timeout=40)
    s = bts.read().decode('UTF-8')
    bts.close()
    tit = getXMLfield(s, "title")
    # tit=tit[:tit.rindex("
    res += " -- " + utils.entoru(tit)
    return res
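# A minimal usage sketch (not from the original source), assuming utils, logD
# and getXMLfield are available and eutils.ncbi.nlm.nih.gov is reachable.
# The result is "<PubMed article URL> -- <article title translated to Russian>".
article = getMedicalArticle("аспирин и риск инсульта")
print(article)   # e.g. "https://www.ncbi.nlm.nih.gov/pubmed/<Id> -- <заголовок статьи>"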
def getArxiv0(q):
    # arXiv.org, low-level query.
    if utils.isEnglish(q) < 0.8:
        q = utils.rutoen(q)
    q = q.replace("\n", "")
    # Unlike Bash, arXiv has a documented API:
    url = ("http://export.arxiv.org/api/query?search_query"
           + urllib.parse.urlencode([("", q)]) + "&start=0&max_results=1")
    logD("Original query: " + q)
    bts = urllib.request.urlopen(url, timeout=40)
    s = bts.read().decode('UTF-8')
    bts.close()
    try:
        # Keep only the first <entry> and pull out its id and title fields.
        s = s[s.index("<entry>"):s.index("</summary>")]
        link = s[s.index("<id>") + 4:s.index("</id>")].replace("/abs/", "/pdf/")
        desc = utils.entoru(s[s.index("<title>") + 7:s.index("</title>")])
        return link + " - " + desc
    except Exception as e:
        logD(e)
        return "Ошибка запроса ("  # "Query error :(" shown to the user
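# A minimal usage sketch (not from the original source), assuming the same
# utils/logD helpers; returns "<PDF link> - <title translated to Russian>"
# for the first matching arXiv entry, or the error string on failure.
print(getArxiv0("attention is all you need"))
# e.g. "http://arxiv.org/pdf/<id> - <переведённое название статьи>"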
def getWolframRU(q):
    # Run a query against Wolfram|Alpha (translates the query itself if needed).
    if utils.isEnglish(q) < 0.5:
        q = rutoen(q)[:-1]  # drop the trailing character, presumably added by the translator
    return entoru(getWolfram(q))
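# A minimal usage sketch (not from the original source), assuming getWolfram,
# rutoen and entoru are defined elsewhere in this module; getWolfram is expected
# to send the (English) query to Wolfram|Alpha and return a text answer.
print(getWolframRU("сколько будет 2 в степени 10"))   # translated to English first
print(getWolframRU("integrate x^2 dx"))               # already English, sent unchanged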