import os

import yaml
from watson_developer_cloud import PersonalityInsightsV3

# Load the Watson service credentials from the repo-relative YAML config.
config_path = os.path.join(
    os.getcwd(),
    '../big5_personality/personality_insights/cfg/watson.yml')

with open(config_path, 'r') as config:
    # Bug fix: yaml.load() without an explicit Loader can construct
    # arbitrary Python objects from the file (unsafe, and deprecated in
    # PyYAML >= 5.1).  safe_load parses the same scalar/date values.
    cfg = yaml.safe_load(config)

big5_personality = PersonalityInsightsV3(
    # The YAML parser returns the version as a date object; the Watson
    # API expects a 'YYYY-MM-DD' string.
    version=cfg['personality']['version'].strftime('%Y-%m-%d'),
    username=cfg['personality']['username'],
    password=cfg['personality']['password'])
""" The example returns a JSON response whose content is the same as that in ../resources/personality-v3-expect2.txt """ from __future__ import print_function import json from os.path import join, dirname from watson_developer_cloud import PersonalityInsightsV3 personality_insights = PersonalityInsightsV3( version='2016-10-20', username='******', password='******') with open('profile.txt') as \ profile_json: profile = personality_insights.profile( profile_json.read(), content_type='text/plain', raw_scores=True, consumption_preferences=True) print(json.dumps(profile, indent=2))
import nltk #nltk.download('vader_lexicon') #nltk.download('punkt') from nltk.sentiment.vader import SentimentIntensityAnalyzer as SIA from watson_developer_cloud import PersonalityInsightsV3 from watson_developer_cloud import WatsonApiException from os.path import join, dirname #Watson API Things personality_insights = PersonalityInsightsV3( version='2018-03-19', iam_apikey='IacInRK_7IUlFQTMChf9IpYVamkwZ2pNQjpKmFEvwZe_', url='https://gateway.watsonplatform.net/personality-insights/api' ) def PersonalityScores(column): print("Watson") openness_raw = [] conscientiousness_raw = [] extraversion_raw = [] agreeableness_raw = [] neuroticism_raw = [] openness_perc = [] conscientiousness_perc = [] extraversion_perc = [] agreeableness_perc = [] neuroticism_perc = [] ''' Keys for the traits...
#Read the data data = pd.read_csv('Reviews.csv') #Make a copy of 100 rows for smaller testing small_data = data.head(100).copy() #To view the documentation #print(help(PersonalityInsightsV3)) #------------------------------------------------------------------------- #Instantiate TA Object with your Credentials #------------------------------------------------------------------------- PI = PersonalityInsightsV3( username='******', password='******', version='2016-10-20', url='https://gateway.watsonplatform.net/personality-insights/api') #------------------------------------------------------------------------- #Get the current time on the clock time_start = time.clock() #------------------------------------------------------------------------- #Iterate Over All the Reviews and Append the Result: #------------------------------------------------------------------------- for index, review in small_data['Text'].iteritems(): #Get the words in the review count = len(review.split())
from ibmcloudenv import IBMCloudEnv
from watson_developer_cloud import PersonalityInsightsV3

# Credentials are pulled from the IBM Cloud environment bindings rather
# than being hard-coded in source.
personality_insights = PersonalityInsightsV3(
    username=IBMCloudEnv.getString('watson_personality_insights_username'),
    password=IBMCloudEnv.getString('watson_personality_insights_password'),
    version='2016-10-20')


def getService(app):
    """Return the (service-name, client) pair used by the app wiring.

    The ``app`` argument is accepted for interface compatibility with the
    other service factories but is not used here.
    """
    return 'watson-personality-insights', personality_insights
#######Clearly you will never use this in production but it serves to show the methods ##Tweepy and Twitter setup access_key = "800656444328546304-sVhWw2d93iOEpK33TT4hL8DTNmWWxOv" access_secret = "PETOnHEqXwZKagf11d4LXE1CoNuj63rOgmkDuPMQU8exJ" consumer_key = "R8wXApxv1HyOZzVxqpZrK4FzU" consumer_secret = "vdqUjF6ly6Jp3oWygpfIE0Px9fJvrontoqzALALguNILOkaX79" auth = tweepy.OAuthHandler(consumer_key, consumer_secret) auth.set_access_token(access_key, access_secret) twitter_api = tweepy.API(auth) ### Personality insights setup personality_insights = PersonalityInsightsV3( version='2017-10-13', username='******', password='******') ##This takes the information from twitter and makes it readable by Personality Insights def convert_status_to_pi_content_item(s): return { 'userid': str(s.user.id), 'id': str(s.id), 'sourceid': 'python-twitter', 'contenttype': 'text/plain', 'language': s.lang, 'content': s.text, #'created': s.created_at_in_seconds, 'reply': (s.in_reply_to_status_id is None),
import csv
import time

filename = "mbti_1.csv"
fields = []
rows = []
tknzr = TweetTokenizer()

# Load the MBTI dataset: the first CSV row is the header, the rest are
# data records.
with open(filename, 'r') as csvfile:
    csvreader = csv.reader(csvfile)
    fields = next(csvreader)
    for row in csvreader:
        rows.append(row)

personality_insights = PersonalityInsightsV3(
    version='2017-10-13',
    username='******',
    password='******')

classes = []
data = {}

# The raw dataset separates individual posts with the literal token
# "|||".  Flatten each record's text to one space-joined blob and
# collect the class label from the second column.
# Idiom fix: str.replace does exactly what the original
# split("|||")/join(" ") round-trip did, in one call.
for row in rows:
    row[0] = row[0].replace("|||", " ")
    classes.append(row[1])

# Deduplicate the labels (order is unspecified, as with the original).
classes = list(set(classes))
for c in classes:
    data[c] = []

# Group the flattened texts by class label.
for row in rows:
    data[row[1]].append(row[0])
# print(rows[:5])
url = creds['url'] elif os.path.isfile('vcap-local.json'): with open('vcap-local.json') as f: vcap = json.load(f) print('Found local VCAP_SERVICES') if 'personality_insights' in vcap['VCAP_SERVICES']: creds = vcap['VCAP_SERVICES']['personality_insights'][0][ 'credentials'] user = creds['username'] password = creds['password'] url = creds['url'] print user, password personality_insights = PersonalityInsightsV3(username=user, password=password) # On Bluemix, get the port number from the environment variable PORT # When running this app on the local machine, default the port to 8080 port = int(os.getenv('PORT', 8080)) @app.route('/', methods=['POST']) @cross_origin() def test(): return "Working" @app.route('/generate', methods=['POST']) @cross_origin() def generate():
import json
import os, sys

from watson_developer_cloud import PersonalityInsightsV3

# Build the client against the 2018-4-11 API snapshot.
# (An x-watson-learning-opt-out header could be set here to keep the
# request data out of IBM's service-improvement corpus.)
personality_insights = PersonalityInsightsV3(
    version='2018-4-11',
    username='******',
    password='******'
)

print("Res >>> ", personality_insights)
import settings import requests import json import os import math import twitter_crawler as tw import re from exceptions import UserNotFoundException, UnauthorizedException, InsufficientTweetsError from watson_developer_cloud import PersonalityInsightsV3 personality_insights = PersonalityInsightsV3( version='2016-10-20', username=settings.PERSONALITY_INSIGHTS_USERNAME, password=settings.PERSONALITY_INSIGHTS_PASSWORD ) class Profile(): def __init__( self, user_id=None, personality=None, values=None): self.user_id = user_id self.personality = personality self.values = values def __str__(self): s = 'user id: {}'.format(
} base = 1600 diff = 800 for key in dic.keys(): auth = dic[key] username = auth[0] password = auth[1] start = base + (diff * int(key)) end = start + diff temp = small_data.loc[start:end,:] PI = PersonalityInsightsV3(version='2017-10-13', username=username, password=password) time_start = time.clock() for index, review in temp['review'].iteritems(): #Get the words in the review count = len(review.split()) #Check if it's greater than 100 if count >= 120: #Enough words to feed into PI #Pass on the review to PI json_output = PI.profile(review,content_type='text/plain')
def get_personality_insights(creds=default_credentials):
    """Build a PersonalityInsightsV3 client from a credentials mapping.

    ``creds`` must supply ``'username'``, ``'password'`` and ``'url'``
    keys; the API version is pinned to the module-level LATEST_VERSION.
    """
    return PersonalityInsightsV3(version=LATEST_VERSION,
                                 username=creds['username'],
                                 password=creds['password'],
                                 url=creds['url'])
try: file7=open(r"stack.text","r") stackover=file7.read() for element in stackover: file2.write('\n') file2.write(element) except: pass file2.close() import json url='https://gateway-lon.watsonplatform.net/personality-insights/api' apikey='NDdDCmUjHrnDr9sTreKFyn4Xj3g2MFpQMfMJlwBc_imz' service=PersonalityInsightsV3(url=url,iam_apikey=apikey,version='2019-07-22') data=open("user.txt","r") text=data.read() profile=service.profile(text,content_type='text/plain').get_result() print(type(profile)) import csv '''csv_columns = ['No','Openness','Conscientiousness','Extraversion','Agreeableness','Emotional range','Challenge','Closeness','Curiosity','Excitement','Harmony','Ideal','Liberty','Love','Practicality','Self-expression','Stability','Structure','Conservation','Openness to change','Hedonism','Self-enhancement','Self-transcendence'] with open('test.csv', 'w') as f: for key in profile.keys(): f.write("%s,%s\n"%(key,my_dict[key]))''' avg= 3*profile['personality'][0]['percentile'],4*profile['personality'][1]['percentile'],2.5*profile['personality'][2]['percentile'],3*profile['personality'][3]['percentile'],3*profile['personality'][4]['percentile'],4*profile['needs'][0]['percentile'],2*profile['needs'][1]['percentile'],4*profile['needs'][2]['percentile'],3.5*profile['needs'][3]['percentile'],3.5*profile['needs'][4]['percentile'],3.5*profile['needs'][5]['percentile'],2*profile['needs'][6]['percentile'],2*profile['needs'][7]['percentile'],4*profile['needs'][8]['percentile'],3*profile['needs'][9]['percentile'],3*profile['needs'][10]['percentile'],3*profile['needs'][11]['percentile'],3*profile['values'][0]['percentile'],3.5*profile['values'][1]['percentile'],2.5*profile['values'][2]['percentile'],3*profile['values'][3]['percentile'],3*profile['values'][4]['percentile'] avg=avg/68 if avg>=0.85: status=3 else if avg>=0.75 and avg<0.85:
def main(params):
    """Run one Personality Insights profile request driven by ``params``.

    ``params`` supplies the service credentials (``'username'``,
    ``'password'``, ``'version'``) and the input ``'text'``; the raw
    profile response is returned to the caller.
    """
    client = PersonalityInsightsV3(
        username=params["username"],
        password=params["password"],
        version=params["version"],
    )
    return client.profile(text=params["text"])
'Google'] = 'minor worthless abashed royal cultured kind motionless insidious obtainable zesty fantastic insidious imminent snotty envious last permissible bright fabulous symptomatic barbarous petite ossified deranged upset confused husky scattered disastrous necessary righteous best unknown oval powerful literate halting invincible quizzical aquatic green handsome actually capricious parched infamous delicate wrong incompetent hushed necessary undesirable toothsome abnormal shrill caring maddening fascinated seemly nostalgic stiff dizzy spiffy defective afraid lowly brash encouraging hapless weak tranquil versed understood last icky gratis wary sharp necessary hospitable trashy petite knotty zany deeply first natural first purple equal lying tremendous weak freezing fluffy delicate offbeat sharp ludicrous deeply lethal colossal whimsical vivacious wealthy kaput whole earsplitting premium impossible quizzical enormous hanging bizarre erratic tranquil crazy glib moldy evanescent fabulous shrill animated special ' companies[ 'Amazon'] = 'chivalrous absorbing secretive rainy tame anxious optimal sore drab steep wiry goofy direful shy sturdy ceaseless chivalrous divergent ossified blue-eyed steep nutty immense disturbed immense psychedelic embarrassed huge silky freezing round wicked nosy pricey bouncy skillful three innate macabre trashy black-and-white dispensable wrathful humorous frightening rotten near deadpan long-term gullible handsome grumpy scientific screeching depressed sharp futuristic damaging longing early xenophobic juvenile panicky grubby homely lyrical awful shocking supreme third late evanescent absorbed ready chilly keen silent smart hungry flawless cruel rich yellow uneven teeny ugliest familiar abiding better wealthy testy giant gamy unwritten high-pitched violent slimy adventurous jazzy odd screeching tired gruesome uptight abortive parallel mere dirty necessary royal late talented flagrant evasive tightfisted chivalrous different cloistered 
scintillating superficial sophisticated cynical tasteful' companies[ 'Uber'] = 'voracious unaccountable substantial null exotic defective hypnotic creepy idiotic glamorous gaudy one malicious large tiresome illustrious faulty gainful highfalutin clammy dysfunctional fluttering cooperative chief famous shut ugly mean statuesque unkempt foolish spiffy parched cowardly creepy repulsive moaning receptive truculent outstanding short knotty grouchy mushy frequent literate living coordinated cynical depressed determined gratis moldy gigantic alert well-groomed domineering young busy ill spooky deeply ancient resolute hard pricey caring gray brown goofy needless majestic long-term lavish nippy ruthless fluffy cruel happy unequal purring cloistered alert friendly unruly satisfying clever same mean used abandoned eight nutty empty jittery purple scientific stupendous unique willing functional delightful elegant terrific torpid royal cultured imported abortive sudden gruesome wacky screeching soft animated therapeutic fabulous psychotic rhetorical glib spotty capricious slow aboard' companies[ 'Microsoft'] = 'physical devilish needy pleasant keen sassy polite historical erect disgusted elastic squealing dry aloof rough detailed malicious aromatic tightfisted slimy wonderful responsible near tame juvenile kaput extra-large powerful illustrious numerous selective striped skinny hurt mysterious aspiring black-and-white satisfying deadpan little exuberant long-term unkempt immense silent historical daily quirky terrible addicted dapper clear devilish tremendous calm past right nimble homeless receptive afraid gabby tender dynamic first abundant deadpan shaggy useful astonishing slow earthy fearful uneven cowardly mixed torpid insidious tightfisted sour standing alleged fine rapid luxuriant glossy pricey adjoining faithful accidental panicky safe real verdant didactic ancient aggressive unbecoming robust screeching tired gruesome uptight abortive parallel mere dirty necessary 
royal late talented flagrant evasive tightfisted chivalrous different cloistered scintillating superficial sophisticated cynical tasteful functional delightful elegant terrific torpid royal cultured imported abortive sudden gruesome wacky' companies[ 'Intel'] = 'enormous old-fashioned rambunctious bustling evanescent clear inquisitive minor furry third weary disgusted narrow coherent fumbling imaginary obese determined painful eatable defiant deafening ugliest responsible plant observant noxious aberrant teeny military useless clever careful wet plastic painful puzzling excited available motionless befitting abusive well-off elastic waiting lacking upbeat sparkling broad romantic internal obedient giddy worried calculating closed nervous ragged glib wonderful bawdy certain determined aware selective noisy wanting adjoining rude rightful square graceful yellow abortive trashy ruthless ambiguous instinctive lean regular bewildered distinct divergent hot future marked tasteful stupid important selfish blue aware available concerned trashy lucky eminent goofy pushy first cheerful righteous abundant mundane somber possessive womanly kind certain right economic adhesive wiggly elfin red female savory icy elderly nine public flowery disgusted merciful dark cooing gullible complex heavy broad cowardly watery tacit squealing needy awake tawdry smart unarmed nonstop upbeat wicked irritating striped talented vague staking sweltering squalid curved' companies[ 'Nintendo'] = 'tedious whole jaded dashing bustling empty afraid elastic likeable left aloof picayune cooing macho madly bored guarded bustling colossal confused gusty furry pumped calculating repulsive ill tender adventurous disturbed macho panicky craven furtive mysterious thundering elegant nervous rightful devilish exclusive dashing fast vigorous sparkling obtainable onerous orange wealthy able sable lopsided thick zealous spooky second-hand eminent painstaking great high-pitched shrill hellish highfalutin fixed 
broken adventurous torpid wretched deadpan strong accidental shaggy unequaled separate normal trite piquant colorful one squealing simplistic lazy bewildered cagey secret noiseless future adaptable graceful incandescent stereotyped annoying incredible lame humorous majestic economic last ritzy petite red fixed hulking oafish outstanding striped used offbeat hurried separate impossible silent substantial icky mature hot succinct lyrical somber military useful wrathful spicy receptive plain truthful grumpy needy loose deadpan rural youthful whispering worried hospitable innocent fluffy perpetual healthy sour powerful delirious excellent stereotyped defeated dynamic slim medical gullible purple' companies[ 'Sony'] = 'cagey hapless cuddly medical awful judicious amuck regular depressed vacuous dazzling abashed known wrathful rabid skillful energetic better wistful voracious poor bad animated demonic useful superb nappy fine puzzled luxuriant smoggy shut mature chunky ultra loutish reminiscent coherent complete large slippery scarce hesitant supreme creepy paltry unhealthy ubiquitous wiggly childlike encouraging loving half versed abject nostalgic normal long-term scarce selfish crooked famous bustling adhesive scattered quirky red bouncy spurious tedious hideous skinny thin thin silky beneficial brawny mixed unwieldy new moldy awful poor fluttering hanging chunky brawny psychedelic best exciting wrathful pretty teeny-tiny evanescent zonked watery disastrous fat deafening concerned cheerful simplistic threatening childlike grieving plausible jolly offbeat overconfident jaded macho woozy gifted insidious psychedelic evanescent spiky zippy equable ordinary tasteful ablaze verdant nervous yielding spotless nosy undesirable garrulous madly cruel left flowery jagged rough wooden whispering psychotic naughty kindly abundant used weary graceful heartbreaking yellow abject clammy' personality_insights = PersonalityInsightsV3( version='2017-10-13', 
iam_apikey='NNlSpXaQCB0NFKCmhsdebwGtYJnsadq8CWPGscH2rfac', url='https://gateway.watsonplatform.net/personality-insights/api') #writes the json file to place def writeToJSONFile(fileName, data): filePathNameWExt = 'data/' + fileName + '.json' with open(filePathNameWExt, 'w') as fp: json.dump(data, fp) #given name and description, will update companies dict and database dict def expand_data(database, comp_name, description): if comp_name not in companies.keys(): companies[comp_name] = description
from watson_developer_cloud import PersonalityInsightsV3 import json import os import sys from termcolor import colored personality_insights = PersonalityInsightsV3( version='2017-10-13', username='', password='', url='https://gateway.watsonplatform.net/personality-insights/api') with open('snowden.txt') as profile_txt: profile = personality_insights.profile(profile_txt.read(), content_type='text/plain', consumption_preferences=True, raw_scores=True).get_result() def consumption(): os.system('clear') ids = [ 'consumption_preferences_shopping', 'consumption_preferences_movie', 'consumption_preferences_music', 'consumption_preferences_reading', 'consumption_preferences_health_and_activity', 'consumption_preferences_entrepreneurship', 'consumption_preferences_environmental_concern', 'consumption_preferences_volunteering' ] names = [
def post_list(request):
    """Render the published posts, annotated with Watson analysis results.

    For every published post this view attaches tone scores, a Spanish
    translation, a personality profile and NLU keyword/emotion data as
    attributes on the post object before rendering the template.
    """
    posts = Post.objects.filter(
        published_date__lte=timezone.now()).order_by('published_date')

    # Watson service clients.  NOTE: the trailing space in the tone
    # analyzer version string is preserved from the original source.
    tone_analyzer = ToneAnalyzerV3(
        username='******', password='******', version='2016-05-19 ')
    language_translator = LanguageTranslator(
        username='******', password='******')
    personality_insights = PersonalityInsightsV3(
        version='2016-10-20', username='******', password='******')
    natural_language_understanding = NaturalLanguageUnderstandingV1(
        username='******', password='******', version='2018-03-16')

    for post in posts:
        posting = post.text

        # --- Tone analysis: one score per basic emotion -------------------
        tone_json = json.dumps(
            tone_analyzer.tone(tone_input=posting,
                               content_type="text/plain"),
            indent=2)
        post.toneObj2 = json.loads(tone_json)
        tones = post.toneObj2['document_tone']['tone_categories'][0]['tones']
        for idx, attr in enumerate(
                ('angerScore', 'disgustScore', 'fearScore',
                 'joyScore', 'sadScore')):
            setattr(post, attr, tones[idx]['score'])

        # --- English -> Spanish translation -------------------------------
        translation = language_translator.translate(
            text=post.text, source='en', target='es')
        post.obj2 = json.loads(
            json.dumps(translation, indent=2, ensure_ascii=False))
        post.wordCount = post.obj2['word_count']
        post.letterCount = post.obj2['character_count']
        post.translation = post.obj2['translations'][0]['translation']

        # --- Personality profile ------------------------------------------
        profile = personality_insights.profile(
            content=post.text,
            content_type='text/plain',
            raw_scores=True,
            consumption_preferences=True)
        post.Obj3 = json.loads(
            json.dumps(profile, indent=2, ensure_ascii=False))
        post.wordcount = post.Obj3['word_count']
        post.percentile = post.Obj3['personality'][0]['percentile']
        post.rawscore = post.Obj3['personality'][0]['raw_score']

        # --- Natural Language Understanding -------------------------------
        natural = natural_language_understanding.analyze(
            text=post.text,
            features=Features(
                entities=EntitiesOptions(emotion=True, sentiment=True,
                                         limit=2),
                keywords=KeywordsOptions(emotion=True, sentiment=True,
                                         limit=2)))
        post.Obj4 = json.loads(json.dumps(natural, indent=2))
        post.text1 = post.Obj4['usage']['text_units']
        post.text2 = post.Obj4['usage']['text_characters']
        post.features = post.Obj4['usage']['features']
        top_keyword = post.Obj4['keywords'][0]
        post.keywords = top_keyword['text']
        post.negative = top_keyword['sentiment']['score']
        emotions = top_keyword['emotion']
        post.sad = emotions['sadness']
        post.joy = emotions['joy']
        post.fear = emotions['fear']
        post.disgust = emotions['disgust']
        post.anger = emotions['anger']

    return render(request, 'blog/post_list.html', {'posts': posts})
from hidden import IBM_personality_insights_api from watson_developer_cloud import PersonalityInsightsV3 import json import pandas as pd personality_insights = PersonalityInsightsV3( version = '2017-09-21', iam_apikey = IBM_personality_insights_api, url = 'https://gateway-wdc.watsonplatform.net/personality-insights/api' ) # To prevent IBM from accessing our data for general service improvements, we # set the X-Watson-Learning-Opt-Out header parameter to true when we create the service instance personality_insights.set_default_headers({'x-watson-learning-opt-out': "true"}) def text_to_big5_personality_pd(profile): df = pd.DataFrame(profile['personality']) df['trait'] = 'Big5' df['name'][4] = 'Neuroticism' return df[['trait','name', 'percentile']] def text_to_openness_sub_personality_pd(profile): df = pd.DataFrame(profile['personality']) df = pd.DataFrame(df[df['name'] == 'Openness']['children'][0]) df['trait'] = 'Openness' return df[['trait','name', 'percentile']] def text_to_conscientiousness_sub_personality_pd(profile):
file1 = col_definition1.split("|")[1] result_id = {id} pers_insights_file_prefix = "pi_" output_filename = pers_insights_file_prefix + str(result_id) + ".csv" df = pd.read_csv(workdir+file1)[[col1]] dict1 = pd.read_csv(workdir+'dict_'+col1+'.csv', dtype={'value': object}).set_index('key')["value"].to_dict() col_name = "text_for_pi" df[col_name] = df[col1].map(dict1).fillna('') ######################################################## from watson_developer_cloud import PersonalityInsightsV3 as PersonalityInsightsV3 personality_insights = PersonalityInsightsV3(username= "******", password= "******") dict_result = {} dict_keywords = {} block = int(len(df)/500) idx=block-1 count=0 ntotal = len(df[col_name].unique()) for el in df[col_name].unique(): if type(el)!=str: el = '' if len(el)>0: sline = el
from watson_developer_cloud import PersonalityInsightsV3
from time import sleep
# from ibm_watson import PersonalityInsightsV3
import json
from openpyxl import load_workbook
import numpy as np
# flask-based web framework
from flask import Flask, render_template, request, send_from_directory, redirect, url_for
from flask_sqlalchemy import SQLAlchemy
from sqlalchemy import Column, Integer, String
from werkzeug.utils import secure_filename

url = 'https://gateway.watsonplatform.net/personality-insights/api'
# Bug fix: the original source left this assignment blank (the key was
# stripped), making the following constructor call invalid.
# TODO(review): supply the IAM API key via configuration, not source.
apikey = ''
service = PersonalityInsightsV3(url=url, iam_apikey=apikey,
                                version='2017-10-13')

app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///test.db'
db = SQLAlchemy(app)


# DB schema
class User(db.Model):
    id = Column(db.Integer, primary_key=True)
    sns_id = Column(db.VARCHAR(150))       # SNS account id to link
    platform_id = Column(db.VARCHAR(150))
    password = Column(db.VARCHAR(150))

    def __init__(self, id=None):
        # Bug fix: the original wrote `id = self.id`, which discarded the
        # constructor argument; store the argument on the instance.
        self.id = id
#!/usr/bin/python
# coding: UTF-8
import re
import json, codecs
from os import mkdir
from os.path import join, dirname, abspath, exists
from watson_developer_cloud import PersonalityInsightsV3
from os.path import join, dirname
import json

# Placeholder credentials: fill in your own version string and API key.
personality_insights = PersonalityInsightsV3(
    version='YOURS',
    iam_apikey='YOURS',
    url='https://gateway.watsonplatform.net/personality-insights/api'
)


def array_to_str(array):
    """Join the given list of strings into one newline-separated string."""
    return '\n'.join(array)


# Resolve (and create, if needed) the output path for the JSON dump.
def getFileName():
    out_dir = join(dirname(abspath('__file__')), 'json_folder/')
    if not exists(out_dir):
        mkdir(out_dir)
    return join(out_dir, 'line_history.json')
def post_list(request):
    """Render the published posts with tone, translation and personality data.

    Each published post gets tone scores, Spanish and Arabic translations,
    and Big-5 personality percentiles attached as attributes before the
    template is rendered.
    """
    posts = Post.objects.filter(
        published_date__lte=timezone.now()).order_by('published_date')

    # Watson service clients.  NOTE: the trailing space in the tone
    # analyzer version string is preserved from the original source.
    tone_analyzer = ToneAnalyzerV3(
        username='******', password='******', version='2016-05-19 ')
    language_translator = LanguageTranslator(
        username='******', password='******')
    personality_insights = PersonalityInsightsV3(
        version='2017-10-13', username='******', password='******')

    for post in posts:
        posting = post.text

        # --- Tone analysis: one score per basic emotion -------------------
        tone_json = json.dumps(
            tone_analyzer.tone(tone_input=posting,
                               content_type="text/plain"),
            indent=2)
        post.toneObj2 = json.loads(tone_json)
        tones = post.toneObj2['document_tone']['tone_categories'][0]['tones']
        for idx, attr in enumerate(
                ('angerScore', 'disgustScore', 'fearScore',
                 'joyScore', 'sadScore')):
            setattr(post, attr, tones[idx]['score'])

        # --- English -> Spanish translation -------------------------------
        translation = language_translator.translate(
            text=post.text, source='en', target='es')
        post.obj2 = json.loads(
            json.dumps(translation, indent=2, ensure_ascii=False))
        post.translate_spanish = post.obj2['translations'][0]['translation']
        post.count = post.obj2['word_count']
        post.charactercount = post.obj2['character_count']

        # --- English -> Arabic translation --------------------------------
        translation1 = language_translator.translate(
            text=post.text, source='en', target='ar')
        post.kobj = json.loads(
            json.dumps(translation1, indent=2, ensure_ascii=False))
        post.translate_arabic = post.kobj['translations'][0]['translation']

        # --- Personality profile ------------------------------------------
        profile = personality_insights.profile(
            content=post.text,
            content_type="text/plain",
            raw_scores=True,
            consumption_preferences=True)
        post.obj3 = json.loads(
            json.dumps(profile, indent=2, ensure_ascii=False))

        personality = post.obj3['personality']
        # Raw score and normalized percentile for the first three
        # top-level characteristics.
        for idx in range(3):
            setattr(post, 'score%d' % idx, personality[idx]['raw_score'])
            setattr(post, 'percentile%d' % idx,
                    personality[idx]['percentile'])
        post.needs = post.obj3['needs'][0]['raw_score']
        # Whether the first characteristic is meaningful for the input
        # language (always true for English/Spanish/Japanese input).
        post.significance0 = personality[0]['significant']

        # Big-5 facet percentiles under the first characteristic.
        children = personality[0]['children']
        for idx, attr in enumerate(
                ('openness', 'conscientiousness', 'extraversion',
                 'Agreeableness', 'Emotional_range')):
            setattr(post, attr, children[idx]['percentile'])

    return render(request, 'blog/post_list.html', {'posts': posts})
the role of defending freedom in its hour of maximum danger. I do not shrink\ from this responsibility -- I welcome it. I do not believe that any of us\ would exchange places with any other people or any other generation. The\ energy, the faith, the devotion which we bring to this endeavor will light\ our country and all who serve it -- and the glow from that fire can truly\ light the world.\ \ And so, my fellow Americans: ask not what your country can do for you -- ask\ what you can do for your country.\ \ My fellow citizens of the world: ask not what America will do for you, but\ what together we can do for the freedom of man.\ \ Finally, whether you are citizens of America or citizens of the world, ask of\ us here the same high standards of strength and sacrifice which we ask of you.\ With a good conscience our only sure reward, with history the final judge of\ our deeds, let us go forth to lead the land we love, asking His blessing and\ His help, but knowing that here on earth God's work must truly be our own." personality_insights = PersonalityInsightsV3( version='2017-10-16', username='******', password='******', url='https://gateway.watsonplatform.net/personality-insights/api') profile = personality_insights.profile(content=text2, content_type='text/plain', raw_scores=True, consumption_preferences=True) print(profile)
from discord.ext import commands from watson_developer_cloud import PersonalityInsightsV3 import json import os import time import utils textChatIDlist = ["170682390786605057", "302137557896921089", "302965414793707522", "293186321395220481"] #general, dev, nsf, other with open("secrets.txt", "r") as secretFile: secretKey = [key[:-1] for key in secretFile.readlines()] personality_insights = PersonalityInsightsV3( username=secretKey[12], password=secretKey[13], version='2017-10-13') personality_insights.set_default_headers({'x-watson-learning-opt-out': "true"}) def prettyMessage(filename): with open("Personalities/" + filename + ".txt", 'r') as file: profileData = json.load(file) wordCount = str(profileData["word_count"]) big5 = profileData["personality"] needs = profileData["needs"] consumption = profileData["consumption_preferences"] values = profileData["values"] big5Result = []
from flask import Flask from flask import request from flask import make_response from watson_developer_cloud import AlchemyLanguageV1 from watson_developer_cloud import PersonalityInsightsV3 from bs4 import BeautifulSoup from urllib.request import urlopen # Flask app should start in global layout app = Flask(__name__) alchemy_language = AlchemyLanguageV1(api_key=os.environ['ALCHEMY_API_KEY']) personality_insights = PersonalityInsightsV3( version='2016-10-20', username=os.environ['PERSONALITY_API_USER'], password=os.environ['PERSONALITY_API_PWD']) @app.route('/', methods=['GET', 'POST']) def hello(): return 'I' 'm alive' @app.route('/webhook', methods=['POST']) def webhook(): # Receive request and parse it, this is the format # https://docs.api.ai/docs/webhook#section-sample-request-to-the-service req = request.get_json(silent=True, force=True) print('Request:\n', json.dumps(req, indent=4))
# Twython + Klout + Watson pipeline: searches Twitter for users matching
# "work at <org>" and (in the truncated continuation) scores the accounts.
# SECURITY(review): real-looking Twitter consumer/access keys and a Klout API
# key are hardcoded in source. These credentials are exposed to anyone with
# repo access and should be revoked and moved to environment variables or a
# secrets file excluded from version control.
# NOTE(review): `Twython` is used but only `from klout import *` is visible --
# presumably `from twython import Twython` exists in the untruncated file.
# NOTE(review): line truncated at the opening `try:` inside findAccounts();
# code preserved byte-identical because original line breaks are lost.
import codecs from klout import * CONSUMER_KEY = 'GdVE1KUrR9A3Cj3OodaGARFaw' CONSUMER_SECRET = 'XgLJvHMFP5EbnZxJDs9o92nTILyDgCwSK21e3zrfRNb4spO32t' ACCESS_KEY = '740727093382643713-DtNLbAyqa2CGKhGcdGOsU6tmJoLM9fV' ACCESS_SECRET = 'N8xkvZAgwwhRFk5GjOkFVqEAIRVwlQMmhwFTw2Re1p3T5' # Make the twitter object twitter = Twython(CONSUMER_KEY, CONSUMER_SECRET, ACCESS_KEY, ACCESS_SECRET) # Make the Klout object k = Klout('rkyutwv2tna2tkffymmefkuj', secure=True) personality_insights = PersonalityInsightsV3( version='2016-10-20', username='******', password='******') def findAccounts(orgString, k): a = 0 searchString = 'work at ' + orgString accounts = twitter.search_users(q=searchString, count=5) # contains (screenname, score) accountInfo = [] for i in range(0, len(accounts)): # print(accounts[i]['screen_name']) if (accounts[i]['protected'] == False): try:
""" The example returns a JSON response whose content is the same as that in ../resources/personality-v3-expect2.txt """ from __future__ import print_function import json from os.path import join, dirname from watson_developer_cloud import PersonalityInsightsV3 personality_insights = PersonalityInsightsV3( version='2016-10-20', ## url is optional, and defaults to the URL below. Use the correct URL for your region. # url='https://gateway.watsonplatform.net/personality-insights/api', username='******', password='******') ## If service instance provides API key authentication # personality_insights = PersonalityInsightsV3( # version='2016-10-20', # ## url is optional, and defaults to the URL below. Use the correct URL for your region. # url='https://gateway.watsonplatform.net/personality-insights/api', # iam_api_key='your_api_key') with open(join(dirname(__file__), '../resources/personality-v3.json')) as \ profile_json: profile = personality_insights.profile(profile_json.read(), content_type='application/json', raw_scores=True, consumption_preferences=True) print(json.dumps(profile, indent=2))
"""Run Watson Personality Insights V3 over ssb4.txt and append the JSON
profile (raw scores + consumption preferences) to ssb4.json.

The response content matches ../resources/personality-v3-expect2.txt.
"""
import json
from os.path import dirname, join

from watson_developer_cloud import PersonalityInsightsV3

personality_insights = PersonalityInsightsV3(
    version='2016-10-20',
    username='******',
    password='******')

# Input is plain English text; request raw scores and consumption preferences.
with open(join(dirname(__file__), 'ssb4.txt')) as profile_json:
    profile = personality_insights.profile(
        profile_json.read(),
        content_type='text/plain',
        content_language='en',
        raw_scores=True,
        consumption_preferences=True)

fname = "ssb4.json"
# NOTE: append mode means repeated runs accumulate profiles in ssb4.json.
with open(fname, 'a') as f:
    f.write(json.dumps(profile, indent=2))
# Fix: removed the redundant f.close() that sat inside the with-block --
# the context manager already closes the file on exit.

# print(json.dumps(profile, indent=2))
# Fragment of extractEssayPersonalityFeatures.py: fakes command-line arguments
# via sys.argv.append (input workbook, Watson username/password, start/end row)
# so the script can run without a shell invocation. The triple-quoted regions
# hold (a) shell/regex notes for launching many instances via nohup and (b) a
# commented-out sample that flattens raw_scores out of a Watson profile.
# NOTE(review): `sys` is used but `import sys` is not visible on this line --
# presumably imported earlier in the original file; verify.
# NOTE(review): the third ''' opens a block that is not visibly closed here and
# the line ends mid-comment -- truncated; code preserved byte-identical because
# original line breaks are lost.
sys.argv.append("temp_training_set_riya.xlsx") sys.argv.append("e255c8ca-9f1d-4c98-87cb-c33dd4f3f776") sys.argv.append("MLDt74fuvrhh") sys.argv.append("0") sys.argv.append("1") sys.argv.append("e255c8ca-9f1d-4c98-87cb-c33dd4f3f776") # this for multiple instances using REGEX # copy from excel userid/password/start/endrow to text file (ATOM, TEXTWRANGLER, VI etc) # replace TAB (\t) with SPACE # replace the ^ the prefix "nohup python extractEssayPersonalityFeatures.py training_set_riya.xlsx " # replace ^(.*) (.*) (.*) (.*) (.*) (.*) (.*) (.*)$ with $1 $2 $3 $4 $5 $6 $7 $8 </dev/null >$5.log 2>&1 & ''' personality_insights = PersonalityInsightsV3(version='2016-10-20', username=sys.argv[2], password=sys.argv[3]) ''' profile = personality_insights.profile("text", raw_scores=True, consumption_preferences=False) profile['needs'][0]['name'] = profile['needs'][0]['raw_score'] profile['values'][0]['name'] = profile['values'][0]['raw_score'] profile['personality'][0]['name'] = profile['personality'][0]['raw_score'] profile['personality'][0]["children"][0]['name'] = profile['personality'][0]["children"][0]['raw_score'] print(json.dumps(profile, indent=2)) ''' # initialize new feature columns # training_essays_df["Anger"]=0 #''' #for i in range(0, len(training_essays_df)):
# Flask service handling the Twitter Account Activity webhook CRC challenge:
# it HMAC-SHA256-signs the incoming crc_token with the consumer secret and
# returns the base64 digest, alongside OAuth1 Twitter auth, Watson Personality
# Insights auth, and local DB initialization.
# NOTE(review): the env var name 'WATSON_UESR_NAME' looks like a typo for
# WATSON_USER_NAME, but it is a runtime string that must match the deployment
# environment -- confirm which spelling the environment actually defines
# before renaming either side.
# NOTE(review): line truncated mid-way through webhook_challenge() (the
# response dict is not closed); code preserved byte-identical because original
# line breaks are lost. Japanese inline comments translated: "imports for DB",
# "auth for operating twitter", "auth for the Watson API", "twitter webhook
# setup", "generate the reply token".
from webhook_process import follow_catch # DB用import import DB.koukokuDB.models from DB.koukokuDB.database import init_db app = Flask(__name__) app.config.from_object('DB.koukokuDB.config.Config') init_db(app) # twitter操作のための認証 twitter_account_auth = OAuth1(os.environ['TWITTER_CONSUMER'], os.environ['TWITTER_CONSUMER_SECRET'], os.environ['ACCESS_TOKEN'], os.environ['ACCESS_TOKEN_SECRET']) # watsonAPIのための認証 watson_personal_API = PersonalityInsightsV3( version="2017-10-13", username=os.environ['WATSON_UESR_NAME'], password=os.environ['WATSON_PASSWORD']) # twitterのwebhook設定 @app.route('/webhooks/twitter', methods=['GET']) def webhook_challenge(): # 返信用トークンを生成 twitter_byte = bytearray(os.environ['TWITTER_CONSUMER_SECRET'], "ASCII") crc_token_byte = bytearray(request.args.get('crc_token'), "ASCII") sha256_hash_digest = hmac.new(twitter_byte, crc_token_byte, hashlib.sha256).digest() response = { 'response_token': 'sha256=' + base64.b64encode(sha256_hash_digest).decode('ASCII')