def build_word_cloud(params):
        """Build word-cloud data from a Twitter hashtag search and return a formatted response."""
        tokens = ast.literal_eval(params.tokens)
        hash_tag = params.hash_tag
        try:
            api = SMAuth.tweepy_auth(tokens['consumer_key'], tokens['consumer_secret'], tokens['access_token'], tokens['access_token_secret'])
            data_ = Tw.hashtag_search(api, hash_tag)
            wc_data = json.loads(wc.wordcloud_json(data_))
            data = cmg.format_response(True, wc_data, 'Data successfully processed!')
        except ValueError as err:
            data = cmg.format_response(False, err, 'Error validating access token: This may be because the user logged out or may be due to a system error.', sys.exc_info())
        return data
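
# Example call (a sketch only: `_WordCloudParams` and its values are placeholders; the
# attribute names mirror those read in build_word_cloud above, and `tokens` is a
# string-encoded dict because it is parsed with ast.literal_eval):
#
#     class _WordCloudParams(object):
#         tokens = ("{'consumer_key': 'KEY', 'consumer_secret': 'SECRET', "
#                   "'access_token': 'TOKEN', 'access_token_secret': 'TOKEN_SECRET'}")
#         hash_tag = '#example'
#
#     response = build_word_cloud(_WordCloudParams())
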
def sentiment_analysis(params):
        """Run sentiment analysis on Twitter hashtag results or Facebook page comments, depending on params.source."""
        try:
            tokens = ast.literal_eval(params.token)
        except ValueError:
            tokens = params.token
        source = str(params.source)
        # Optional request parameters; fall back to defaults when absent.
        limit = getattr(params, 'limit', '')
        since = getattr(params, 'since', '')
        until = getattr(params, 'until', '')
        page = str(getattr(params, 'page', 'me'))
        hash_tag = str(getattr(params, 'hash_tag', ''))
        post_ids = getattr(params, 'post_ids', None)
        if post_ids is not None:
            post_ids = ast.literal_eval(post_ids)
        #analyzed_data = 'Incorrect datasource name provided!'
        if source == 'twitter':
            api = SMAuth.tweepy_auth(tokens['consumer_key'], tokens['consumer_secret'], tokens['access_token'], tokens['access_token_secret'])
            data_ = Tw.hashtag_search(api, hash_tag)
            #lsi.initialize_stream(hash_tag, unique_id, tokens)  # if already exists do something
            #analyzed_data = smlf.process_social_media_data(unique_id, hash_tag)
            data = sa.sentiment(data_)
            result = cmg.format_response(True, data, 'Data successfully processed!')
            return result

        elif source == 'facebook':
            try:
                data = FB.get_page_posts_comments(tokens, limit, since, until, page, post_ids)
            except ValueError as err:
                data = cmg.format_response(False, err, 'Error validating access token: This may be because the user logged out or may be due to a system error.', sys.exc_info())
                return data

            #full_comment = []
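            # `data` appears to be a list of dicts, each with a 'post_id' and a list of
            # 'comments' whose entries carry a 'message' (see _calculate_sentiment below).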
            analyzed_data = []

            def _calculate_sentiment(post_id, post_comments):
                # Concatenate every comment message for this post and run sentiment
                # analysis on the combined text; runs in a worker thread and appends
                # its result to the shared analyzed_data list.
                full_comment_str = ''
                for j in post_comments:
                    for comment in j['comments']:
                        full_comment_str += ' '
                        full_comment_str += comment['message'].encode('UTF8')
                logger.debug(full_comment_str)
                data_ = sa.sentiment(full_comment_str)
                data_['post_id'] = post_id
                analyzed_data.append(data_)

            threads = []
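            # One worker thread per requested post id; results accumulate in analyzed_data.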
            if post_ids is not None:
                for post_id in post_ids:
                    filtered_comments = filter(lambda d: d['post_id'] in post_id, data)
                    t = threading.Thread(target=_calculate_sentiment, args=(post_id, filtered_comments))
                    t.start()
                    print "Thread started to calculate sentiment analysis for post_id: {0}".format(post_id)
                    threads.append(t)
                    #full_comment_str.join(full_comment)
                    #analysed_data = sa.sentiment(full_comment_str.join(filtered_comments))
                for t in threads:
                    try:
                        t.join()
                    except Exception as err:
                        print err

                data = cmg.format_response(True, analyzed_data, 'Data successfully processed!')
                return data
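
# Example call for the Facebook path (a sketch only: `_SAParams`, its attribute values,
# and the token format are placeholders; the attribute names mirror those read in
# sentiment_analysis above, and the exact shapes FB.get_page_posts_comments expects are
# not shown in this module):
#
#     class _SAParams(object):
#         token = 'PAGE_ACCESS_TOKEN'
#         source = 'facebook'
#         page = 'some_page'
#         post_ids = "['12345_67890', '12345_67891']"
#
#     response = sentiment_analysis(_SAParams())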