def get_graph_for_topic(topic, limit=50):
    """Build and return a tweet graph for tweets matching *topic*.

    Args:
        topic: search string passed to the tweet search backend.
        limit: maximum number of tweets to fetch (default 50); also
            forwarded to get_graph_for_tweets.

    Returns:
        Whatever get_graph_for_tweets produces for the fetched tweets,
        or an empty list when the search reports an error.
    """
    tweets, error = data_fetch_public.get_tweets_from_search(
        topic, limit, data_fetch_constants.DATA_SOURCE_TWEEPY)
    if error:
        return []
    return get_graph_for_tweets(tweets, limit)
def _get_tweets_based_on_request(request):
    """Fetch tweets according to the request's query parameters.

    Checks the GET params 'topic', 'tweet_url', then 'user' — first
    match wins. 'limit' caps the number of tweets (default 50).

    Returns:
        A list of tweets for the matched parameter, a rendered error
        page when 'tweet_url' cannot be parsed, or None when none of
        the recognized parameters is present.

    NOTE(review): the `error` value returned by the fetch helpers is
    ignored here, so a failed fetch may yield empty/None tweets —
    callers should be prepared for that.
    """
    search_term = request.GET.get('topic')
    tweet_url = request.GET.get('tweet_url')
    username = request.GET.get('user')
    limit = int(request.GET.get('limit', 50))

    if search_term:
        tweets, error = data_fetch_public.get_tweets_from_search(
            search_term, limit, data_fetch_constants.DATA_SOURCE_TWEEPY
        )
        return tweets
    if tweet_url:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # are not swallowed; a malformed URL still yields the error page.
        try:
            user, tweet_id = utils.get_user_tweet_id_from_tweet_url(tweet_url)
        except Exception:
            return render(request, 'clientapp/error.html', {})
        tweets, error = data_fetch_public.get_replies_of_tweet(
            tweet_id, user, limit, data_fetch_constants.DATA_SOURCE_TWEEPY
        )
        return tweets
    if username:
        tweets, error = data_fetch_public.get_tweets_of_user(
            username, limit, data_fetch_constants.DATA_SOURCE_TWEEPY
        )
        return tweets
    return None
def _get_tweets_from_hashtag_scores(hashtag_scores, limit):
    """Collect tweets for the first MAX_DEPTH hashtags in *hashtag_scores*.

    Args:
        hashtag_scores: iterable of (hashtag_str, score) pairs; only the
            ordering matters here — scores themselves are unused.
        limit: per-hashtag tweet cap passed to the search backend.

    Returns:
        A flat list of all tweets fetched across the processed hashtags.
    """
    all_tweets = []
    for it_count, (hashtag_str, _score) in enumerate(hashtag_scores, start=1):
        # `break` instead of the original `continue`: once MAX_DEPTH
        # hashtags have been processed, the remaining iterations did
        # nothing but spin through the rest of the sequence.
        if it_count > MAX_DEPTH:
            break
        tweets, error = data_fetch_public.get_tweets_from_search(
            hashtag_str, limit, data_fetch_constants.DATA_SOURCE_TWEEPY)
        # NOTE(review): `error` is ignored, matching the original
        # behavior — tweets are extended regardless of fetch status.
        all_tweets.extend(tweets)
    return all_tweets
def analyze_topic(request):
    """Fetch up to 300 tweets for the 'topic' GET param, convert them to
    dicts, persist the result, and return it as JSON.

    Returns:
        JsonResponse with the list of tweet dicts, or a JSON error
        object when the search fails.
    """
    search_term = request.GET.get('topic')
    tweets, error = data_fetch_public.get_tweets_from_search(
        search_term, 300, data_fetch_constants.DATA_SOURCE_TWEEPY
    )
    if error:
        return JsonResponse({'error': error}, safe=False)

    logger.info('Analyzing toxicity of results')
    start = time.time()
    result = [tweet.to_dict() for tweet in tweets]
    end = time.time()
    # Lazy %-args instead of string concatenation: the message is only
    # formatted when the log level is enabled.
    logger.info('Took %s seconds', end - start)

    _update_database(search_term, result)
    return JsonResponse(result, safe=False)
def summary(request):
    """Render the summary page for a topic, a tweet's replies, or a user.

    Dispatches on the GET params 'topic', 'tweet_url', then 'user'
    (first match wins); with none present, renders the home page with
    top trends and top searches. 'limit' caps fetched tweets
    (default 50).

    Returns:
        The rendered summary/home template, or the error template when
        the tweet URL is malformed or the fetch reports an error.
    """
    search_term = request.GET.get('topic')
    tweet_url = request.GET.get('tweet_url')
    username = request.GET.get('user')
    limit = int(request.GET.get('limit', 50))

    error = False
    tweets = []
    about = None
    analysis_type = TYPE_USER

    if search_term:
        tweets, error = data_fetch_public.get_tweets_from_search(
            search_term, limit, data_fetch_constants.DATA_SOURCE_TWEEPY
        )
        about = 'Topic: %s' % search_term
        analysis_type = TYPE_TOPIC
    elif tweet_url:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # are not swallowed; a malformed URL still yields the error page.
        try:
            user, tweet_id = utils.get_user_tweet_id_from_tweet_url(tweet_url)
        except Exception:
            return render(request, 'clientapp/error.html', {})
        tweets, error = data_fetch_public.get_replies_of_tweet(
            tweet_id, user, limit, data_fetch_constants.DATA_SOURCE_TWEEPY
        )
        about = 'User: %s; Tweet: %s' % (user, tweet_id)
        analysis_type = TYPE_CONVERSATION
    elif username:
        tweets, error = data_fetch_public.get_tweets_of_user(
            username, limit, data_fetch_constants.DATA_SOURCE_TWEEPY
        )
        about = 'User: %s' % username
        analysis_type = TYPE_USER
    else:
        # No recognized parameter: show the landing page.
        context = {
            'homeresult': {
                'top_trends': _get_top_trends_data(),
                'top_searches': _get_top_searches_data(),
            }
        }
        return render(request, 'clientapp/summary.html', context)

    if error:
        return render(request, 'clientapp/error.html', {})
    return _render_results_with_summary(request, about, analysis_type, search_term, tweets)
def collect_top_trends():
    """Fetch up to 300 tweets for each of the current top-10 trends and
    persist each batch to the database.

    Failures on an individual trend (fetch error or database error) are
    logged and skipped so one bad trend does not abort the whole run.
    Returns None.
    """
    trends, error = data_fetch_public.get_top_trends(
        data_fetch_constants.DATA_SOURCE_TWEEPY)
    if error:
        return

    # Iterate the trend dicts directly rather than range(len(...)).
    for trend in trends[:10]:
        name = trend['name']
        logger.info('Getting tweets for %s', name)
        start = time.time()
        tweets, error = data_fetch_public.get_tweets_from_search(
            name, 300, data_fetch_constants.DATA_SOURCE_TWEEPY)
        if error:
            continue
        result = [_get_tweet_dict(tweet) for tweet in tweets]
        try:
            _update_database(name, result)
        except Exception:
            # logger.exception logs the active traceback itself; the
            # original passed sys.exc_info() as the message, which only
            # printed the tuple's repr.
            logger.exception('Failed to update database for %s', name)
            continue
        end = time.time()
        logger.info('Took %s seconds', end - start)