def get_twitter(user, twitter_name):
    print("entered twitter")
    # check if user exists
    if Twitter_User.objects.filter(username=twitter_name).exists():
        print("exists already")
        # increment the tweets that haven't been cached
        return HttpResponseRedirect('http://yahoo.com')
    else:
        crawl_user = crawl_tt.twitter_user(twitter_name)
        twitter_user = Twitter_User(Id=user,
                                    username=twitter_name,
                                    twitter_id=crawl_user.user.id,
                                    location=crawl_user.user.location,
                                    description=crawl_user.user.description,
                                    followers=str(crawl_user.get_followers()),
                                    friends=str(crawl_user.get_friends()))
        twitter_user.save()
        print("saved twitter user")
        tweets = crawl_user.get_tweets()
        for tweet in tweets:
            add_tweet = Tweets(Id=user,
                               created_at=tweet.created_at,
                               text=tweet.text,
                               coords='llala',
                               hashtags=tweet.hashtags(),
                               links=tweet.links())
            add_tweet.save()
        return ''
def post(self):
    data = {"status": "success", "error_message": ""}
    form = TweetCreateForm()
    if form.validate():
        try:
            print(session["user_id"])
            tweet = Tweets(
                tweet_content=form.tweet_content.data,
                user=session["user_id"],
                tweet_date=datetime.now(),
            )
            tweet.save()
        except Exception as e:
            data["status"] = "error"
            data["error_message"] = _("Unknown error occurred")
            print(e)
        finally:
            if data["status"] == "error":
                data["error_message"] = render_template(
                    "errors/ajax_errors.html", form=form, data=data)
    else:
        data["status"] = "error"
        data["error_message"] = render_template(
            "errors/ajax_errors.html", form=form, data=data)
    return jsonify(data)
def collectTweets(p_noms):
    os.chdir('../sentimentClassifier')
    with open('savedNBClassifier.pkl', 'rb') as f:
        NBClassifier = pickle.load(f)

    auth = tweepy.OAuthHandler(twitter_creds.TWITTER_APP_KEY,
                               twitter_creds.TWITTER_APP_SECRET)
    auth.set_access_token(twitter_creds.TWITTER_KEY, twitter_creds.TWITTER_SECRET)
    api = tweepy.API(auth)

    for nom in p_noms:
        results = api.search(q=nom, count=10)
        for (idx, tweet) in enumerate(results[0:10]):
            tweet_text = results[idx].text
            processedTestLine = tweetProcessor.formatTweet(tweet_text)
            sent = NBClassifier.classify(
                tweetProcessor.extract_features(
                    tweetProcessor.getFeatureVector(processedTestLine)))
            tweet = Tweets(nominee=nom, tweet=tweet_text, sentiment=sent)
            tweet.save()
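# The Tweets objects saved in collectTweets (and queried with mongoengine-style
# Tweets.objects(Q(...)) calls in get_pos_values / get_neg_values further down)
# imply a document model along these lines. The model itself is not part of these
# snippets, so the field types below are assumptions, not the project's actual schema.
from mongoengine import Document, StringField

class Tweets(Document):
    nominee = StringField(required=True)   # search term the tweet was collected for
    tweet = StringField()                  # raw tweet text
    sentiment = StringField()              # classifier output, e.g. 'pos' or 'neg'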
def profile_timeline(id):
    if request.method == 'POST':
        tweet = Tweets(content=request.form['tweet'],
                       tweet_owner=current_user.id)
        owner_id = tweet.tweet_owner
        owner = User.query.filter_by(id=owner_id).first()
        tweet.owner_nickname = owner.nickname
        tweet.owner_username = owner.username
        db.session.add(tweet)
        db.session.commit()
        return redirect(url_for('profile', id=current_user.id))
    elif request.method == 'DELETE':
        tweet = Tweets.query.filter_by(id=id).first()  # Move this to the controller
        db.session.delete(tweet)  # Move this to the controller
        db.session.commit()  # Move this to the controller
        return redirect(url_for('profile'))
    elif request.method == 'GET':
        i = User.query.filter_by(id=current_user.id).first()
        user = User.query.filter_by(id=id).first()
        user_tweets = Tweets.query.filter_by(tweet_owner=id).order_by(
            Tweets.id.desc()).all()
        followers = user.count_followers()
        followed = user.count_followed()
        return render_template('profile.html', user=user,
                               user_tweets=user_tweets, i=i)
def send_tweet(data):
    logger.info("send_tweet function")
    try:
        result = send_tweet.clf.hedging_top_n_classes(
            data['media'][0]['media_url_https'], top_n=5)
        logger.info(result)
    except FailToClassify as e:
        logger.exception(e.message)
        return

    # Build an HTML list of the predicted classes.
    classes = u'<ul>' + \
        reduce(lambda x, y: x + y,
               map(lambda x: '<li>' + x + '</li>', result[0])) + \
        u'</ul>'

    try:
        t = Tweets(text=data['text'],
                   photo_url=data['media'][0]['media_url_https'],
                   date=data['time'],
                   longitude=data['coordinates'][0],
                   latitude=data['coordinates'][1],
                   classes=classes)
        db_session.add(t)
        db_session.commit()
    except IntegrityError as e:
        db_session.rollback()
        logger.exception("IntegrityError: {}".format(e.message))
    except Exception:
        db_session.rollback()
        e = sys.exc_info()[0]
        logger.exception("Error: %s" % e)
def fetch_tweets_by_location(location):
    geocode = geocoder.google(location)
    radius = vincenty(tuple(geocode.bbox['northeast']),
                      tuple(geocode.bbox['southwest'])).miles

    tweets_data = Tweets.objects(location=location.lower()).first()
    if tweets_data:
        tweets = tweets_data.tweets
    else:
        tweets = []
        for tweet in tweepy.Cursor(
                twitter_handler.search,
                q=urllib.parse.quote('near:{} -RT'.format(location)),
                within="{}mi".format(radius),
                result_type='recent',
                # count=100,
                since=(datetime.now().date() - timedelta(days=3)).strftime('%Y-%m-%d'),
                until=datetime.now().date().strftime('%Y-%m-%d'),
                lang='en').items():
            tweets.append(tweet.text)
        # tweets_data = Tweets(location=location.lower(), tweets=tweets)
        # tweets_data.save()
    return tweets
def createTweet(request):
    """
    Create a tweet for the logged-in user.

    'request.user' is the logged-in user; 'request.body' carries the tweet
    content as JSON under the key "tweet".
    """
    to_tweet = request.data.get("tweet")
    if to_tweet is None:
        return Response(
            {'error': 'Please provide the tweet content under the key "tweet"'},
            status=HTTP_400_BAD_REQUEST)
    tweet = Tweets(username=request.user, tweet=str(to_tweet))
    tweet.save()
    obj = Tweets.objects.filter(username=request.user, tweet=str(to_tweet)).last()
    note = "Tweet created successfully, with tweet ID " + str(obj.id)
    return Response({'Note': note}, status=HTTP_200_OK)
def watch_stream():
    auth = OAuth(ACCESS_TOKEN, ACCESS_TOKEN_SECRET, CONSUMER_KEY, CONSUMER_SECRET)
    stream = TwitterStream(auth=auth, domain="userstream.twitter.com")
    for msg in stream.user():
        if 'text' in msg and len(msg['entities']['urls']) > 0:
            user_id = msg['user']['id_str']
            user_name = msg['user']['name']
            screen_name = msg['user']['screen_name']
            text = msg['text']
            tweet_id = msg['id_str']
            timestamp = msg['timestamp_ms']
            created_dt = datetime.datetime.fromtimestamp(int(timestamp) / 1000.0)
            for eurl in msg['entities']['urls']:
                link = eurl['expanded_url']
                if "flic.kr" in link or "twitter.com" in link:
                    continue
                try:
                    # Resolve one level of redirects.
                    hres = requests.head(link, timeout=3)
                    if hres.status_code == 301 or hres.status_code == 302:
                        link = hres.headers.get("location", link)
                    # Strip common tracking query parameters.
                    for s in ["?utm_", "&utm_", "%26utm_", "?from=", "?ref=",
                              "&ref=", "?ncid=rss", "?n_cid="]:
                        if s in link:
                            idx = link.find(s)
                            link = link[:idx]
                    if link == "h":
                        print(">>", link)
                        print(">>", eurl['expanded_url'])
                    tweet = Tweets(url=link, text=text, tweet_id=tweet_id,
                                   user_id=user_id, user_name=user_name,
                                   screen_name=screen_name, created_at=created_dt)
                    tweet.save()
                    fetch(link, created_dt)
                except requests.exceptions.Timeout as e:
                    print(">>", "error", link, e)
                except requests.exceptions.SSLError as e:
                    print(">>", "error", link, e)
                except requests.exceptions.ConnectionError as e:
                    print(">>", "error", link, e)
def create_post(request):
    if request.method == 'POST':
        post_text = request.POST.get('the_post')
        response_data = {}

        post = Tweets(tweet=post_text, user_id=request.user.id)
        post.save()

        response_data['result'] = 'Create post successful!'
        response_data['postpk'] = post.pk
        response_data['text'] = post.tweet

        return HttpResponse(
            json.dumps(response_data),
            content_type="application/json"
        )
    else:
        return HttpResponse(
            json.dumps({"nothing to see": "this isn't happening"}),
            content_type="application/json"
        )
def timeline():
    if request.method == 'POST':
        tweet = Tweets(content=request.form['tweet'],
                       tweet_owner=current_user.id)
        owner_id = tweet.tweet_owner
        owner = User.query.filter_by(id=owner_id).first()
        tweet.owner_nickname = owner.nickname
        tweet.owner_username = owner.username
        db.session.add(tweet)
        db.session.commit()
        return redirect(url_for('timeline'))
    elif request.method == 'GET':
        uid = current_user.id
        user = User.query.filter_by(id=uid).first()
        tweets = user.followed_posts()
        followers = user.count_followers()
        followed = user.count_followed()
        # all_tweets = Tweets.query.filter().order_by(Tweets.id.desc()).all()
        return render_template('timeline.html', tweets=tweets, user=user,
                               followers=followers, followed=followed)
def get_pos_values(p_cats):
    pos_scores = []
    for nom in p_cats:
        pos_sent_query = Tweets.objects(Q(nominee=nom) & Q(sentiment='pos')).count()
        pos_scores.append(pos_sent_query)

    def roundup(n):
        return 10 * ((n + 9) // 10)

    pos_highest_value = max(pos_scores)
    pos_graph_height = roundup(pos_highest_value)
    return pos_scores, pos_graph_height
def insert_tweets(tweets):
    rows = []
    for tweet in tweets:
        rows.append(Tweets(
            id_str=tweet['id_str'],
            created_at=tweet['created_at'],
            user_id=tweet['user']['id'],
            language=tweet['user']['lang'],
            geo=tweet['geo'],
            text=tweet['text'],
            coordinates=tweet['coordinates']
        ))

    # save tweets in db
    if len(rows):
        with session_scope() as s:
            s.bulk_save_objects(rows)
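# insert_tweets above relies on a session_scope() helper that the snippet does not
# define. It is presumably the usual SQLAlchemy "commit on success, roll back on
# error" context manager; the sketch below assumes that, and the engine URL and
# Session factory are hypothetical placeholders, not the project's real configuration.
from contextlib import contextmanager

from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

engine = create_engine("sqlite:///tweets.db")  # placeholder URL
Session = sessionmaker(bind=engine)


@contextmanager
def session_scope():
    """Provide a transactional scope: commit on success, roll back on error."""
    session = Session()
    try:
        yield session
        session.commit()
    except Exception:
        session.rollback()
        raise
    finally:
        session.close()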
def get_neg_values(p_cats):
    neg_scores = []
    for nom in p_cats:
        neg_sent_query = Tweets.objects(Q(nominee=nom) & Q(sentiment='neg')).count()
        neg_scores.append(neg_sent_query)

    def roundup(n):
        return 10 * ((n + 9) // 10)

    neg_highest_value = max(neg_scores)
    neg_graph_height = roundup(neg_highest_value)
    return neg_scores, neg_graph_height
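# get_pos_values and get_neg_values above differ only in the sentiment label they
# count, so a single parameterized helper could replace both. This is a sketch under
# the same assumptions as those functions (the mongoengine Tweets model and Q class
# they already use); get_sentiment_values is a hypothetical name, not original code.
def get_sentiment_values(p_cats, label):
    """Count tweets per nominee for one sentiment label ('pos' or 'neg')."""
    scores = [Tweets.objects(Q(nominee=nom) & Q(sentiment=label)).count()
              for nom in p_cats]

    def roundup(n):
        # Round up to the nearest multiple of 10 for the chart height.
        return 10 * ((n + 9) // 10)

    return scores, roundup(max(scores))

# Example usage mirroring the originals:
# pos_scores, pos_graph_height = get_sentiment_values(p_cats, 'pos')
# neg_scores, neg_graph_height = get_sentiment_values(p_cats, 'neg')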
def on_status(self, status):
    if self.count != 0:
        print(self.count)
        tweet = status.text
        created_at = status.created_at
        name = status.user.name
        handle = status.user.screen_name
        fav_count = status.favorite_count
        retweet_count = status.retweet_count
        followers = status.user.followers_count
        friends = status.user.friends_count
        favorites = status.user.favourites_count
        Tweets(tweet=tweet, created_at=created_at, name=name, handle=handle,
               fav_count=fav_count, retweet_count=retweet_count,
               followers=followers, friends=friends,
               favorites=favorites).save()
        self.count -= 1
        return True
    else:
        return False
from app import db
from models import Tweets

db.create_all()
db.session.add(Tweets(
    "neiltyson",
    1161318161423228928,
    "What happens when you don't pay close enough attention to the lyrics of #BohemianRhapsody... https://t.co/MEII14bbsX",
    "What happens when you don t pay close enough attention to the lyrics of BohemianRhapsody",
    109943622844380451,
    "23-Feb-2019 (22:30:00.000000)",
    13733,
    2265,
    116,
    0.0,
    0.5,
    "2019-02-23",
    "Saturday",
    22
))
db.session.commit()
def save_tweets_in_db(location, tweets):
    tweets_data = Tweets(location=location.lower(), tweets=tweets)
    tweets_data.save()
def fetch_tweets_from_db(location):
    tweets_data = Tweets.objects(location=location.lower()).first()
    return tweets_data.tweets if tweets_data else None
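# save_tweets_in_db, fetch_tweets_from_db, and fetch_tweets_by_location above all
# treat Tweets as a cache keyed by lowercased location. That project's model is not
# included in the snippets; a minimal mongoengine sketch consistent with the usage
# would be the following (field choices are assumptions).
from mongoengine import Document, ListField, StringField

class Tweets(Document):
    location = StringField(required=True, unique=True)  # lowercased location used as cache key
    tweets = ListField(StringField())                    # tweet texts collected for that location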
def main():
    parsed = 1
    rows_inserted = 0
    with open(sys.argv[1]) as f:
        for line in f:
            inserted = False
            try:
                doc = json.loads(line)
            except Exception:
                exc_type, exc_value, exc_traceback = sys.exc_info()
                lines = traceback.format_exception(exc_type, exc_value, exc_traceback)
                txt = ''.join(lines)
                print(txt)
                bot.sendMessage(chat_id=chat_id, text=str(txt))
                # Fall back to a dummy "limit" document so the line is skipped below.
                doc = {"limit": {"track": "err"}}

            if doc.get('limit') is None:
                tweet_id = doc.get('id')
                t_published_at = arrow.get(
                    int(doc.get('timestamp_ms')) / 1000).format('YYYY-MM-DD HH:mm:ss ZZ')
                t_tweet_text = doc.get('text')
                t_lang = doc.get('lang')
                t_source = doc.get('source', "").partition('>')[-1].rpartition('<')[0]
                t_mentions = []
                if doc.get('entities').get('user_mentions'):
                    for m in doc.get('entities').get('user_mentions'):
                        t_mentions.append(m.get('screen_name'))
                t_in_reply_to_status_id = doc.get('in_reply_to_status_id')
                t_in_reply_to_user_id = doc.get('in_reply_to_user_id')

                if t_lang == "en":
                    sentiment = sentiment_analysis(doc)
                    if sentiment == "Positive":
                        t_positive, t_negative, t_neutral = True, False, False
                    elif sentiment == "Negative":
                        t_positive, t_negative, t_neutral = False, True, False
                    elif sentiment == "Neutral":
                        t_positive, t_negative, t_neutral = False, False, True
                else:
                    t_positive = t_negative = t_neutral = None

                if doc.get("retweeted_status"):
                    t_retweeted = True
                    t_retweeted_id = doc.get("retweeted_status").get('id')
                else:
                    t_retweeted = None
                    t_retweeted_id = None

                if doc.get("quoted_status"):
                    t_quoted = True
                    t_quoted_id = doc.get("quoted_status").get('id')
                else:
                    t_quoted = None
                    t_quoted_id = None

                u_id = doc.get('user').get('id')
                tweet = Tweets(id=tweet_id,
                               published_at=t_published_at,
                               tweet_text=t_tweet_text,
                               lang=t_lang,
                               source=t_source,
                               mentions=t_mentions,
                               in_reply_to_status_id=t_in_reply_to_status_id,
                               in_reply_to_user_id=t_in_reply_to_user_id,
                               positive=t_positive,
                               negative=t_negative,
                               neutral=t_neutral,
                               retweeted=t_retweeted,
                               retweeted_id=t_retweeted_id,
                               quoted=t_quoted,
                               quoted_id=t_quoted_id,
                               user_id=u_id)

                try:
                    u_created_at = arrow.get(
                        doc.get('user').get('created_at'),
                        "ddd MMM DD HH:mm:ss Z YYYY").format('YYYY-MM-DD HH:mm:ss ZZ')
                except Exception:
                    u_created_at = None
                    bot.sendMessage(chat_id=chat_id, text="date error user")

                u_name = doc.get('user').get('name')
                u_screen_name = doc.get('user').get('screen_name')
                u_description = doc.get('user').get('description')
                u_location = doc.get('user').get('location')
                u_lang = doc.get('user').get('lang')
                u_favourites_count = doc.get('user').get('favourites_count')
                u_followers_count = doc.get('user').get('followers_count')
                u_following_count = doc.get('user').get('friends_count')
                u_statuses_count = doc.get('user').get('statuses_count')
                u_verified = doc.get('user').get('verified')
                u_geo_enabled = doc.get('user').get('geo_enabled')

                # Route the user row to a per-keyword model class, or to "trash"
                # when no keyword group matches the raw line.
                for k in config.keywords:
                    if any(w in line.lower() for w in config.keywords[k]):
                        MyClass = getattr(importlib.import_module("models"), k)
                        inserted = True
                        usr = MyClass(userid=u_id,
                                      created_at=u_created_at,
                                      name=u_name,
                                      screen_name=u_screen_name,
                                      description=u_description,
                                      location=u_location,
                                      lang=u_lang,
                                      favourites_count=u_favourites_count,
                                      followers_count=u_followers_count,
                                      following_count=u_following_count,
                                      statuses_count=u_statuses_count,
                                      verified=u_verified,
                                      geo_enabled=u_geo_enabled)
                        db_session.merge(usr)
                        rows_inserted += 1
                if not inserted:
                    MyClass = getattr(importlib.import_module("models"), "trash")
                    usr = MyClass(userid=u_id,
                                  created_at=u_created_at,
                                  name=u_name,
                                  screen_name=u_screen_name,
                                  description=u_description,
                                  location=u_location,
                                  lang=u_lang,
                                  favourites_count=u_favourites_count,
                                  followers_count=u_followers_count,
                                  following_count=u_following_count,
                                  statuses_count=u_statuses_count,
                                  verified=u_verified,
                                  geo_enabled=u_geo_enabled)
                    db_session.merge(usr)
                    rows_inserted += 1

                db_session.merge(tweet)
                rows_inserted += 1

            if not parsed % config.n_commit:
                db_session.commit()
                print_time_str(st, parsed, sys.argv[1], rows_inserted)
            if not parsed % config.n_send:
                try:
                    bot.sendMessage(chat_id=chat_id,
                                    text=time_str(st, parsed, sys.argv[1], rows_inserted))
                except Exception:
                    pass
            parsed += 1

    db_session.commit()
    final = "%s terminated!" % sys.argv[1]
    bot.sendMessage(chat_id=chat_id, text=final)
    bot.sendMessage(chat_id=chat_id,
                    text=time_str(st, parsed, sys.argv[1], rows_inserted))
    print_time_str(st, parsed, sys.argv[1], rows_inserted)
ironmq = IronMQ(api_version=3, config_file=None)  # constructor name assumed; the original snippet starts mid-call
q = ironmq.queue('tweets')

users = []
for token in RawTokens.select():
    users.append({"client": token.client, "secret": token.secret})

# lastTweet = Tweets.select().order_by(Tweets.id.desc()).get().tid

auth = tweepy.OAuthHandler(CONSUMER_TOKEN, CONSUMER_SECRET)
auth.set_access_token(users[0]['client'], users[0]['secret'])
api = tweepy.API(auth)

for update in api.user_timeline(screen_name="daniel7447d4rb4", count=5):
    try:
        print(update.id)
        tw = Tweets(tid=update.id, send="false")
        tw.save()
    except Exception as e:
        print(e)
q = ironmq.queue('tweets')

users = []
for token in RawTokens.select():
    users.append({"client": token.client, "secret": token.secret})

# lastTweet = Tweets.select().order_by(Tweets.id.desc()).get().tid

auth = tweepy.OAuthHandler(CONSUMER_TOKEN, CONSUMER_SECRET)
auth.set_access_token(users[0]['client'], users[0]['secret'])
api = tweepy.API(auth)

for update in api.user_timeline(screen_name=FOLLOW_USER, count=5):
    try:
        print(update.id)
        tw = Tweets(tid=update.id, send="false")
        tw.save()
    except Exception as e:
        print(e)

for update in api.user_timeline(screen_name="mariainesandra", count=5):
    try:
        print(update.id)
        tw = Tweets(tid=update.id, send="false")
        tw.save()
    except Exception as e:
        print(e)

for twe in Tweets.select().where(Tweets.send == 'false'):
    for usr in users:
        q.post(
import logging
import sys
from datetime import datetime

from cqlengine import connection
from cqlengine.management import sync_table

logging.basicConfig(level=logging.INFO)

connection.setup(['127.0.0.1'], "tiktok")
logging.info("Connected to tiktok database")

sync_table(Tweets)
sync_table(Queue)
sync_table(Tweets_queue)

g = Queue.objects.all()
for x in g:
    q = Tweets_queue.objects.filter(Tweets_queue.queue_id == x.id,
                                    Tweets_queue.time_to_send < datetime.utcnow())
    tweetlist = []
    for entry in q:
        tweet = Tweets.get(id=entry.tweet_id)
        tweetlist.append(tweet.tweet)
        logging.info(tweet.tweet)
        Tweets_sent.create(queue_id=entry.queue_id,
                           time_sent=datetime.utcnow(),
                           tweet_id=tweet.id)
        Tweets_queue.objects(queue_id=entry.queue_id,
                             time_to_send=entry.time_to_send).delete()

sys.exit()