def check_entities(uri, test_input, input_type, expected_output):
    client = textapi.Client("app_id", "app_key")
    httpretty.register_uri(httpretty.POST, uri, body=expected_output)
    entities = client.Entities(test_input)
    ok_(input_type in httpretty.last_request().parsed_body)
    ok_('entities' in entities)
    ok_('person' in entities['entities'])
def aylien(url):
    from aylienapiclient import textapi
    client = textapi.Client('a8b3a850', 'c46e7039ff55dd00866b3dc1de4ee9d7')
    '''
    extract = client.Extract({'url': url, 'language': 'en'})
    sentiment = client.Sentiment({'text': 'sample text', 'language': 'en'})
    classifications = client.ClassifyByTaxonomy({'url': url, 'language': 'en', 'taxonomy': 'iptc-subjectcode'})
    # codes demo: http://show.newscodes.org/index.html?newscodes=subj&lang=en-GB&startTo=Show
    classification = client.Classify({'url': url, 'language': 'en'})
    entities = client.Entities({'text': text, 'language': 'en'})
    concepts = client.Concepts({'text': text, 'language': 'en'})
    summary = client.Summarize({'url': url, 'sentences_number': 3})
    '''
    combined = client.Combined({
        'url': url,
        'language': 'en',
        'endpoint': [
            'extract',
            'sentiment',
            'classify/iptc-subjectcode',
            'classify',
            'entities',
            'concepts'
        ]
    })
    for result in combined['results']:
        print(result['endpoint'])
        print(result['result'])
def analyzeValue(para):
    c = textapi.Client("3f0bd976", "6b6565eed1a5d6bdaddee4823903727f")
    s = c.Sentiment({'text': para})
    # classify = c.ClassifyByTaxonomy({'text': "Jim is a singer and jumper", 'taxonomy': "iab-qag"})
    # summary = c.Summarize('http://techcrunch.com/2014/02/27/aylien-launches-text-analysis-api-to-help-developers-extract-meaning-from-documents/')
    # print(summary)
    # print(classify)
    value = 0
    if s['polarity'] == 'positive':
        value += s['polarity_confidence']
    else:
        value -= s['polarity_confidence']
    if s['subjectivity'] == 'objective':
        value += s['subjectivity_confidence']
    else:
        value -= s['subjectivity_confidence']
    # print(value, s)
    return value
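# Illustration of the range produced by analyzeValue, using a hypothetical
# Sentiment response (not real API output): confidences are added for
# positive/objective text and subtracted otherwise, so the result falls in [-2.0, 2.0].
fake_sentiment = {'polarity': 'positive', 'polarity_confidence': 0.9,
                  'subjectivity': 'objective', 'subjectivity_confidence': 0.8}
# With this response analyzeValue would return 0.9 + 0.8 == 1.7;
# a negative, subjective text with the same confidences would score -1.7.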
def get_summary(
    file,
    text,
    aylien_app_id,
    aylien_API_KEY,
):
    """
    Params
    --------
    file : str
        Title passed to the summarizer
    text : str
        Text to summarize
    aylien_app_id : str
        AYLIEN application ID
    aylien_API_KEY : str
        AYLIEN API key

    Returns
    --------
    summary_text : str
        Summarized text
    summary_bullets : list
        List that contains each sentence of the summary as an individual entry
    """
    print("Calling Aylien API", end="\r")
    aylien = textapi.Client(aylien_app_id, aylien_API_KEY)
    # cfg_summary_len and cfg_summary_lang are module-level config values assumed
    # to be defined elsewhere in the project.
    summary = aylien.Summarize({
        "title": file,
        "text": text,
        "sentences_number": cfg_summary_len,
        "language": cfg_summary_lang
    })
    summary_text = " ".join(summary["sentences"])
    summary_bullets = summary["sentences"]
    print("Summary successful")
    return summary_text, summary_bullets
async def storeArticle(id):
    print(id)
    await asyncio.sleep(2)
    try:
        client = textapi.Client("9cf3ddcd", "b3b1304158f0a52adc0f1f970059edc3")
        x = requests.get("https://hacker-news.firebaseio.com/v0/item/" + str(id) + ".json?print=pretty").json()
        article = ArticleBackup()
        if 'url' in x:
            article.url = x['url']
        else:
            article.url = " "
        if 'by' in x:
            article.by = x['by']
        else:
            article.by = " "
        if 'title' in x:
            article.title = x['title']
        else:
            article.title = " "
        if 'score' in x:
            article.score = x['score']
        else:
            article.score = " "
        sentiment = client.Sentiment({'text': article.title})
        article.sentimentPolarity = sentiment['polarity']
        article.save()
    except Exception as e:
        print("Error in storing article:", e)
    return
def aylien_for_sentiment(self, poem, language):
    poem_str = ''
    for line in poem:
        poem_str += line[0].lower() + ' '
    client = textapi.Client('3e6d3188', 'a59809a95ed4dbf11af753e374b64605')
    sentiment = client.Sentiment({'language': language, 'text': poem_str})
    return sentiment['polarity']
def senti(request):
    client = textapi.Client("42f857c1", "14a7ac2d47989ded74c0f1f49522cfdc")
    # feedo = feedback.objects.values_list('feedback')
    # query_results = feedback.objects.all()
    # #sentiment = client.Sentiment({'text': })
    # l = []
    # tot = len(feedo)
    # for i in range(tot):
    #     st = str(feedo[i][0])
    #     sentiment = client.Sentiment({'text': st})
    #     l.append(sentiment['polarity'])
    #     #l[query_results[i].feedback_id] = sentiment['polarity']
    # d = {'sentu': l, 'query_results': query_results, 'tot': tot}
    # print(d)
    #
    # #return HttpResponse('running')
    # return render(request, 'eventze/senti.html', d)
    table = feedback.objects.all().values_list()
    for e in table:
        print(e)
        if e[4] == "":
            print("match")
            # sentiment analysis
            sentiment = client.Sentiment({'text': str(e[3])})
            t = feedback.objects.get(feedback_id=int(e[0]))
            t.sentim = sentiment['polarity']
            t.save()
    query_results = feedback.objects.all()
    return render(request, 'eventze/senti.html', {'query_results': query_results})
def home(request):
    result1 = {}
    response = requests.get(
        'https://hacker-news.firebaseio.com/v0/topstories.json')
    request.session['topdata'] = response.json()
    result = request.session['topdata'][0:25]
    client = textapi.Client("88f165f7", "7a3f743c0e8a7ccc1f881f5d5c6ca245")
    count = 25
    for story in result:
        response1 = requests.get(
            'https://hacker-news.firebaseio.com/v0/item/%s.json' % story)
        print(response1.json())
        request.session['topdata'] = response1.json()
        sentiment = client.Sentiment(
            {'text': request.session['topdata']['title']})
        request.session['topdata']['polarity'] = sentiment['polarity']
        news_instance = News(
            by=request.session['topdata']['by'],
            title=request.session['topdata']['title'],
            points=request.session['topdata']['score'],
            # comments=request.session['topdata']['descendants'],
            sentiment=request.session['topdata']['polarity'])
        news_instance.save()
        result1[story] = request.session['topdata']
        count = count - 1
        if count < 0:
            break
    return render(request, 'newapp/home.html', {
        'result': result,
        'result1': result1
    })
def test_aspect_based_sentiment():
    client = textapi.Client(APP_ID, APP_KEY)
    classify = client.AspectBasedSentiment(
        {'text': "Delicious food. Disappointing service.", 'domain': "restaurants"})
    for prop in ['text', 'domain', 'sentences', 'aspects']:
        ok_(prop in classify)
    ok_(isinstance(classify['aspects'], list))
    ok_(isinstance(classify['sentences'], list))
def check_extract(uri, test_input, input_type, expected_output):
    client = textapi.Client("app_id", "app_key")
    httpretty.register_uri(httpretty.POST, uri, body=expected_output)
    article = client.Extract(test_input)
    ok_('url' in httpretty.last_request().parsed_body)
    ok_('author' in article)
    ok_('title' in article)
def test_classify():
    client = textapi.Client(APP_ID, APP_KEY)
    classify = client.Classify(
        {'url': 'http://www.bbc.com/sport/0/football/25912393'})
    for prop in ['text', 'language', 'categories']:
        ok_(prop in classify)
    ok_(isinstance(classify['categories'], list))
def test_hashtags():
    client = textapi.Client(APP_ID, APP_KEY)
    hashtags = client.Hashtags(
        {'url': 'http://www.bbc.com/sport/0/football/25912393'})
    for prop in ['text', 'language', 'hashtags']:
        ok_(prop in hashtags)
    ok_(isinstance(hashtags['hashtags'], list))
def performSummaryAnalysis(url, sentences_number=3):
    client = textapi.Client(APP_ID, API_KEY)
    summary = client.Summarize({
        'url': url,
        'sentences_number': sentences_number
    })
    return summary
def summarizer_aylien_get(text, percent_sentences=50):
    """Summarize text using the AYLIEN service.

    Args:
        text (str): the text to summarize
        percent_sentences (int, optional): percent of sentences to retain. Defaults to 50.

    Returns:
        dict: the method [method], the summary [sentences], and the number of
        sentences kept [summary_num_sentences]
    """
    num_sentences = int(percent_sentences / 100 * len(sent_tokenize(text)))
    output = {}
    # send request to their service
    client = textapi.Client("79e389d3", "1bc2400da0cb4745c30fb68b67e5e5cf")
    out = client.Summarize({
        'sentences_number': num_sentences,
        'text': text,
        'title': None
    })
    output['summary_num_sentences'] = num_sentences
    output['sentences'] = "".join([" " + val for val in out['sentences']])[1:]
    output['method'] = "Aylien"
    return output
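# Hedged usage sketch for summarizer_aylien_get: assumes NLTK's sent_tokenize is
# imported, the credentials above are still valid, and network access is available.
article = (
    "AYLIEN exposes a text-analysis API. "
    "The Summarize endpoint picks representative sentences. "
    "Callers choose how many sentences to keep. "
    "Here we keep roughly half of them."
)
result = summarizer_aylien_get(article, percent_sentences=50)
print(result['summary_num_sentences'])  # int(50 / 100 * 4) == 2
print(result['sentences'])              # the selected sentences, space-joined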
def check_classify(uri, test_input, input_type, expected_output):
    client = textapi.Client("app_id", "app_key")
    httpretty.register_uri(httpretty.POST, uri, body=expected_output)
    classification = client.Classify(test_input)
    ok_(input_type in httpretty.last_request().parsed_body)
    ok_('categories' in classification)
    ok_(hasattr(classification['categories'], "__getitem__"))
class TextHandler:
    client = textapi.Client('**********', '***************************')

    def __init__(self, urlEntryS):
        # Despite the name, invalidURL is True while the URL is considered valid
        # and is flipped to False when extraction or dialect detection fails.
        self.invalidURL = True
        try:
            self.dataExtracted = self.client.Extract(urlEntryS)
        except textapi.HttpError:
            self.invalidURL = False

    def isValid(self):
        return self.invalidURL

    def getTitle(self):
        self.title = self.dataExtracted["title"]
        return self.title

    def getArticle(self):
        self.article = self.dataExtracted["article"]
        return self.article

    def getAudio(self):
        fullText = self.title + '\n' + self.article
        if ConvertTextToSpeech.detectDialect(fullText) is None:
            self.invalidURL = False
        else:
            ConvertTextToSpeech.getMP3FromText(fullText)
def post(self):
    data = request.get_json()
    decoded = None
    secret_token = request.headers['Authorization']
    try:
        # The original code called the raw header string here, which always fails;
        # decode_token is an assumed token-decoding helper (see the sketch below).
        decoded = decode_token(secret_token)
    except Exception:
        return {'error': True, 'errorMessage': 'Invalid access_token'}, 403
    if not decoded:
        return {'error': True, 'errorMessage': 'Invalid access_token'}, 403
    if not summarizer_validator.validate(data):
        return {'error': True, 'errorMessage': summarizer_validator.errors}, 400
    db_data = users.find_one({'email': decoded['email']})
    flag = 0
    for app in db_data['applications']:
        if (app['name'] == decoded['app_name']
                and app['secret_token'] == secret_token
                and 'summarizer' in app['allowed_apis']):
            flag = 1
    if flag == 0:
        return {'error': True, 'errorMessage': 'Invalid secret token'}, 403
    client = textapi.Client(
        os.getenv('AYLIEN_APP_ID'), os.getenv('AYLIEN_API_KEY'))
    summary = client.Summarize(
        {'text': data['text'], 'title': data['title'], 'sentences_number': 3})
    return {'error': False, 'results': summary}
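# A minimal sketch of the token-decoding helper assumed above. The handler presumably
# verifies a signed token carrying 'email' and 'app_name'; PyJWT and the JWT_SECRET
# environment variable are assumptions for illustration, not taken from the snippet.
import os
import jwt  # PyJWT


def decode_token(secret_token):
    # Raises jwt.InvalidTokenError on a bad or expired token, which the caller turns into a 403.
    return jwt.decode(secret_token, os.getenv('JWT_SECRET'), algorithms=['HS256'])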
def retrieveTextFromURL(self) -> list:
    """
    Get the text from each URL retrieved by retrieveNews.

    Parameters
    ----------
    None

    Returns
    -------
    articles : list[str]
        The plain text of each article
    """
    # Check URLs
    if len(self.urls) == 0:
        raise URLException('Error has occurred retrieving news URLs')

    # Create the client
    client = textapi.Client(self._api_keys['textapi']['applicationId'],
                            self._api_keys['textapi']['applicationKey'])

    count = 0
    for url in self.urls:
        extract = client.Extract({'url': url})
        self.articles.append(extract['article'])
        count += 1
        if count == 50:
            break
    return self.articles
def get(request):
    list_feeds_id = LIST_FEEDS_ID
    list_feeds_url = LIST_FEED_URL
    app_id = APP_ID
    app_key = APP_KEY
    # list_accounts = []
    try:
        res = requests.get(list_feeds_id)
        list_id = res.json()
        for id in list_id:
            # LOG.info("sleeping in account urls request...")
            res = requests.get(list_feeds_url.format(str(id)))
            resjson = res.json()
            news = Newsfeed()
            news.id = resjson.get('id')
            news.by = resjson.get('by')
            news.title = resjson.get('title')
            news.score = resjson.get('score')
            news.type = resjson.get('type')
            news.url = resjson.get('url')
            client = textapi.Client(app_id, app_key)
            sentiment = client.Sentiment({'text': news.title})
            news.computedsentiment = sentiment['polarity']
            news.save()
            print("Saving Records to DB")
            time.sleep(5)
            # LOG.info(">>> account under project {0}, {1}".format(str(project), resjson))
        return
    except Exception as e:
        # LOG.error("Error is occurred in list of projects:%s" % e)
        raise
def home(request):
    if request.method == 'POST':
        form = URL(request.POST)
        if form.is_valid():
            endpoints = [
                'Sentiment', 'Classify', 'Entities', 'Concepts', 'Summarize'
            ]
            url = form.cleaned_data['url']
            data = {}
            client = textapi.Client('c0fcb430', '749f0eb9482c31ea51fb1b92a028231a')
            try:
                for endpoint in endpoints:
                    # Dispatch to the client method by name; getattr avoids building
                    # and eval'ing a code string for each endpoint.
                    data[endpoint] = getattr(client, endpoint)({'url': url})
                context = {'data': data}
                dump = json.dumps(context)
                if form.cleaned_data['type'][0] == 'json':
                    return HttpResponse(dump, content_type='application/json')
                elif form.cleaned_data['type'][0] == 'table':
                    context['form'] = URL()
                    return render(request, 'detail.html', context=context)
                else:
                    return HttpResponse(dump, content_type='text/x-json')
            except HttpError:
                return render(request, 'error.html', {'form': URL()})
    else:
        form = URL()
    return render(request, 'home.html', {'form': form})
def test_concepts():
    client = textapi.Client(APP_ID, APP_KEY)
    concepts = client.Concepts(
        {'url': 'http://www.bbc.co.uk/news/business-25821345'})
    for prop in ['text', 'language', 'concepts']:
        ok_(prop in concepts)
    ok_(isinstance(concepts['concepts'], dict))
def get_sentiment_bulk(reviews):
    # aylien = textapi.Client("4969e38e", "f8de4ced275a6b449a677d3efeae6e5b")
    aylien = textapi.Client("ea9b1309", "59ad1ddbae972c6526c920dfb0c5116b")
    print(reviews)
    for i in range(len(reviews)):
        t = reviews[i]['text']
        s = reviews[i]['title']

        text_sentiment = aylien.Sentiment({'text': t})
        print(text_sentiment)
        text_polarity = text_sentiment['polarity']
        text_polarity_conf = text_sentiment['polarity_confidence']
        text_score = calculate_score(text_polarity, text_polarity_conf)
        reviews[i]['text_score'] = text_score

        sum_sentiment = aylien.Sentiment({'text': s})
        sum_polarity = sum_sentiment['polarity']
        sum_polarity_conf = sum_sentiment['polarity_confidence']
        sum_score = calculate_score(sum_polarity, sum_polarity_conf)
        reviews[i]['title_score'] = sum_score

        reviews[i]['hybrid_score'] = getHybridScore(t, s, text_score, sum_score)
    return reviews
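# calculate_score and getHybridScore are not defined in this snippet. A minimal,
# purely illustrative sketch of what a signed-confidence score could look like
# (an assumption, not the project's actual implementation):
def calculate_score(polarity, confidence):
    # Map the API's polarity label to a signed score weighted by its confidence.
    if polarity == 'positive':
        return confidence
    if polarity == 'negative':
        return -confidence
    return 0.0  # neutral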
def test_sentiment():
    client = textapi.Client(APP_ID, APP_KEY)
    sentiment = client.Sentiment({'text': 'John is a very good football player!'})
    for prop in ['polarity', 'subjectivity', 'polarity_confidence', 'subjectivity_confidence']:
        ok_(prop in sentiment)
    rate_limits = client.RateLimits()
    for prop in ['limit', 'remaining', 'reset']:
        ok_(prop in rate_limits)
def getSentimentPolarity(text):
    c = textapi.Client("YOUR_APP_ID", "YOUR_ACCESS_KEY")
    try:
        s = c.Sentiment({'text': text})
    except Exception as ex:
        log("Exception in Aylien Api ", str(ex))
        return ""
    return s['polarity']
def aylien(self):
    client = textapi.Client("7e479b2e", d["aylien"]["key"])
    summary = client.Summarize({
        'title': "",
        'text': ". ".join(self.conversation),
        'sentences_number': self.num_sentences
    })
    return summary["sentences"]
def defineType(para):
    c = textapi.Client("3f0bd976", "6b6565eed1a5d6bdaddee4823903727f")
    classify = c.ClassifyByTaxonomy({'text': para, 'taxonomy': "iab-qag"})
    return classify['categories'][0]['label']
def test_extract():
    client = textapi.Client(APP_ID, APP_KEY)
    extract = client.Extract({
        'url': 'http://techcrunch.com/2014/02/27/aylien-launches-text-analysis-api-to-help-developers-extract-meaning-from-documents/'
    })
    for prop in ['author', 'image', 'article', 'videos', 'title', 'feeds']:
        ok_(prop in extract)
def test_summarize():
    client = textapi.Client(APP_ID, APP_KEY)
    summary = client.Summarize(
        'http://techcrunch.com/2014/02/27/aylien-launches-text-analysis-api-to-help-developers-extract-meaning-from-documents/'
    )
    for prop in ['text', 'sentences']:
        ok_(prop in summary)
    ok_(isinstance(summary['sentences'], list))
def test_classify_by_taxonomy():
    client = textapi.Client(APP_ID, APP_KEY)
    classify = client.ClassifyByTaxonomy({
        'text': "John is a very good football player",
        'taxonomy': "iab-qag"
    })
    for prop in ['text', 'language', 'categories']:
        ok_(prop in classify)
    ok_(isinstance(classify['categories'], list))
def test_entities():
    client = textapi.Client(APP_ID, APP_KEY)
    entities = client.Entities({
        'url': 'http://www.businessinsider.com/carl-icahn-open-letter-to-apple-2014-1'
    })
    for prop in ['text', 'language', 'entities']:
        ok_(prop in entities)
    ok_(isinstance(entities['entities'], dict))