Example #1
def home():
    if request.method == 'GET':
        return render_template('index.html')
    # POST: classify the submitted form text
    return 'negative' if predict_sentiment(request.form['text']) else 'positive'
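These snippets are Flask view functions with their imports and @app.route decorators stripped. A minimal sketch of the surrounding app wiring, assuming a hypothetical predict_sentiment stand-in (the real helper is model-specific in each project):

from flask import Flask, render_template, request

app = Flask(__name__)

def predict_sentiment(text):
    # Hypothetical stand-in: returns a truthy value for negative text.
    return 'bad' in text.lower()

@app.route('/', methods=['GET', 'POST'])
def home():
    if request.method == 'GET':
        return render_template('index.html')
    return 'negative' if predict_sentiment(request.form['text']) else 'positive'

if __name__ == '__main__':
    app.run(debug=True)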
Example #2
def search_tweets():
    query = request.json['query']
    tweets = getTweets(query)
    # predict_sentiment returns one probability list per tweet;
    # map each argmax index to its label via the sentiments list.
    predictions = predict_sentiment(tweets)
    predictions = [
        sentiments[prediction.index(max(prediction))]
        for prediction in predictions
    ]
    # Tally how many tweets fall into each sentiment class.
    sentiment_counts = {'negative': 0, 'neutral': 0, 'positive': 0}
    for sentiment in predictions:
        sentiment_counts[sentiment] += 1
    return jsonify({'results': sentiment_counts}), 200
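The comprehension above assumes predict_sentiment returns one probability list per tweet and that a module-level sentiments list maps the argmax index to a label. A minimal illustration of that assumed contract (the label order and probabilities are made up):

sentiments = ['negative', 'neutral', 'positive']  # assumed label order

# One probability list per input, as the comprehension expects.
predictions = [[0.7, 0.2, 0.1], [0.1, 0.3, 0.6]]
labels = [sentiments[p.index(max(p))] for p in predictions]
print(labels)  # ['negative', 'positive']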
Example #3
def predict():
    """
    Return a JSON response with the sentiment of a client sentence,
    predicted by a PyTorch Transformer + GRU model trained on the
    Yelp Reviews dataset.
    """
    if request.method == "POST":
        # get_data() returns bytes; decode it instead of calling str(),
        # which would yield the literal "b'...'" representation.
        sentence = request.get_data(as_text=True)
        sentiment = predict_sentiment(model, tokenizer, sentence)
        return jsonify({'sentiment': sentiment})
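Because this view reads the raw request body with request.get_data(), a client can POST plain text rather than JSON. A sketch of a client call, assuming the route is /predict on a local development server:

import requests

# Route and port are assumptions; the snippet omits its decorator.
resp = requests.post(
    'http://localhost:5000/predict',
    data='The food was great and the staff were friendly.',
)
print(resp.json())  # e.g. {'sentiment': 'positive'}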
Example #4
def home():
    if request.method == "POST":
        user_id = request.form.get("userid")
        user_id = user_id.lower().strip()
        if not user_id:
            return render_template('base.html') + 'PLEASE ENTER A USER ID'
        if user_id not in recc_df.index:
            return render_template('base.html') + 'THE USER ID IS NOT AVAILABLE IN THE DATASET, PLEASE USE A VALID USER ID'
        # Build recommendations for this user and show the top five.
        result_df = predict_sentiment(user_id, recc_df)
        return render_template('home.html', predict=result_df.head(5), user=user_id)
    else:
        return render_template('base.html')
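The check user_id not in recc_df.index suggests recc_df is a pandas DataFrame indexed by user id. A toy illustration of that lookup pattern (the column names and rows are invented):

import pandas as pd

# Invented recommendation frame indexed by user id.
recc_df = pd.DataFrame(
    {'product': ['p1', 'p2'], 'score': [0.9, 0.7]},
    index=['alice', 'bob'],
)
print('alice' in recc_df.index)  # True
print('carol' in recc_df.index)  # False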
Example #5
def chat():
    """API endpoint handling chatbot requests.

    Args:
        JSON input via POST request.
        The request should contain an 'input' field
        holding the chatbot query.

    Returns:
        JSON response:
        {
            "message": message,
            "source": source-to-scrape-data-from,
            "data": data-relevant-to-source,
            "company": company-name,
        }
    """
    req_data = request.get_json()
    input_query = req_data["input"]

    # Get intent analysis from Dialogflow
    try:
        flow_output = dflow.collect_intent(input_query)
    except Exception as error:
        pprint(error)
        return "Error", 500

    # Extract the fulfillment message from the Dialogflow response
    message = flow_output.query_result.fulfillment_messages[0].text.text[0]

    # Extract the company from the Dialogflow response
    try:
        company_field = flow_output.query_result.parameters.fields["Company"]
        company = company_field.list_value.values[0].string_value
    except Exception:
        company = None

    # Extract the source from the Dialogflow response
    try:
        source_field = flow_output.query_result.parameters.fields["source"]
        source = source_field.list_value.values[0].string_value
    except Exception:
        source = None

    # Scrape data from corresponding source and do sentiment analysis on it
    raw_data = None
    if company is not None:
        if source == "reddit":
            raw_data = scrap.scrape_reddit(company)
            for key in raw_data:
                predictions = predict_sentiment(raw_data[key])
                raw_data[key] = [
                    (sentence, int(prediction))
                    for sentence, prediction in zip(raw_data[key], predictions)
                ]
        elif source == "news":
            raw_data = scrap.scrape_news(company)
            sentences = [news_url["title"] for news_url in raw_data]
            predictions = predict_sentiment(sentences)
            for data, prediction in zip(raw_data, predictions):
                data["sentiment"] = int(prediction)
        elif source == "twitter":
            raw_data = scrap.scrape_twitter(company)
            sentences = [tweet_object["tweet"] for tweet_object in raw_data]
            predictions = predict_sentiment(sentences)
            for data, prediction in zip(raw_data, predictions):
                data["sentiment"] = int(prediction)

    # Return response back to frontend
    response = {
        "message": message,
        "source": source,
        "data": raw_data,
        "company": company,
    }
    return response
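A sketch of a round trip against this endpoint, assuming it is mounted at /chat on a local server (route, port, and the example query are assumptions):

import requests

resp = requests.post(
    'http://localhost:5000/chat',
    json={'input': 'What is the news sentiment for Tesla?'},
)
print(resp.json())
# e.g. {'message': '...', 'source': 'news',
#       'data': [{'title': '...', 'sentiment': 1}, ...],
#       'company': 'Tesla'}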
Example #6
def predict():
    text_to_predict = request.json["text"]
    # Lazy %-formatting: the string is built only if DEBUG logging is enabled.
    logger.debug("Received: %s", text_to_predict)
    prediction = ml_predict.predict_sentiment(text_to_predict)
    return jsonify(prediction)
Example #7
def predict_tweet():
    tweet = request.json['tweet']
    # The model expects a batch, so wrap the single tweet in a list.
    prediction = predict_sentiment([tweet])
    sentiment = sentiments[prediction[0].index(max(prediction[0]))]
    return jsonify({'sentiment': sentiment}), 200
Example #8
from predict import predict_sentiment

text = input()
print(predict_sentiment(text))
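Every example on this page depends on a project-specific predict_sentiment. To run Example #8 standalone, a toy stand-in can be dropped into predict.py; this keyword heuristic is purely illustrative and not any project's trained model:

# predict.py -- toy stand-in, not a trained model
NEGATIVE_WORDS = {'bad', 'awful', 'terrible', 'slow'}

def predict_sentiment(text):
    """Return 'negative' if any known negative word appears, else 'positive'."""
    words = set(text.lower().split())
    return 'negative' if words & NEGATIVE_WORDS else 'positive'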