Example #1
def get_raw_data(ticker):
    # fetch raw social-media documents that mention the ticker from the last hour
    HOUR = 3600
    max_age = time.time() - HOUR
    return db().collection('raw_data')\
        .where('timestamp', '>', max_age)\
        .where('tickers', 'array_contains', ticker)\
        .get()
Example #2
def get_trending():
    start_time = time.time()
    limit = request.args.get("limit")
    if limit is None:
        limit = 6
    else:
        limit = int(limit)
    permitted_metrics = [
        "AHI", "SGP", "RHI", "sentiment", "tweet_mentions",
        "reddit_comment_mentions", "reddit_post_mentions",
        "stocktwits_post_mentions", "yahoo_finance_comment_mentions"
    ]
    metric = request.args.get("metric")
    threshold_metric = request.args.get("threshold_metric")
    if threshold_metric is not None and threshold_metric not in permitted_metrics:
        return jsonify({
            "success": False,
            "error": f"unrecognized threshold metric, please choose one of {permitted_metrics}",
            "time_taken": time.time() - start_time
        })
    threshold = request.args.get("threshold")
    if threshold is not None:
        threshold = float(threshold)
    if metric is None:
        metric = 'AHI'
    elif metric not in permitted_metrics:
        return jsonify({
            "success": False,
            "error": f"unrecognized metric, please choose one of {permitted_metrics}",
            "time_taken": time.time() - start_time
        })

    # fetch the trending documents for this metric computed within the last hour
    trending = db().collection('trending')\
        .where('sorted_by', '==', metric)\
        .where('timestamp', '>', time.time() - 3600)\
        .get()
    if threshold is not None and threshold_metric in permitted_metrics:
        # keep only documents whose threshold_metric value exceeds the threshold
        trending = [
            doc for doc in trending
            if (doc.to_dict().get(threshold_metric) or -1) > threshold
        ]
    tickers = [{
        "ticker": doc.to_dict().get("ticker", ""),
        "rank": cnt,
        "info": doc.to_dict()
    } for cnt, doc in enumerate(trending[0:limit])]

    return jsonify({
        "success": True,
        "quantity": len(tickers),
        "tickers": tickers,
        "time_taken": time.time() - start_time
    })
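A minimal sketch of how get_trending might be wired up and exercised locally, assuming a Flask app object (the route path and query parameters below are illustrative, and Firestore credentials must already be configured for db() to work):

from flask import Flask

app = Flask(__name__)
app.add_url_rule("/trending", view_func=get_trending)

if __name__ == "__main__":
    # exercise the endpoint with Flask's test client instead of a live server
    with app.test_client() as client:
        resp = client.get("/trending?metric=AHI&limit=3")
        print(resp.get_json())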
Example #3
def get_history():
    start_time = time.time()
    ticker = request.args.get("ticker")
    metric = request.args.get("metric")
    if not ticker:
        return jsonify({
            "success": False,
            "time_taken": time.time() - start_time,
            "error": "please provide a ticker in the 'ticker' params"
        })
    permitted_metrics = [
        "AHI", "SGP", "RHI", "sentiment", "tweet_sentiment", "tweet_mentions",
        "reddit_post_sentiment", "reddit_post_mentions",
        "reddit_comment_sentiment", "reddit_comment_mentions",
        "stocktwits_post_sentiment", "stocktwits_post_mentions",
        "yahoo_finance_comment_sentiment", "yahoo_finance_comment_mentions"
    ]
    if metric not in permitted_metrics:
        return jsonify({
            "success": False,
            "time_taken": time.time() - start_time,
            "error": f"please provide a permitted metric (one of {permitted_metrics})"
        })

    history_doc = db().collection('tickers')\
        .document(ticker)\
        .collection('history')\
        .document(metric)\
        .get()
    if not history_doc.exists:
        return jsonify({
            "success": False,
            "time_taken": time.time() - start_time,
            "error": "history not found - either the ticker does not exist "
                     "or the history has not been calculated yet"
        })
    history = history_doc.to_dict()["history"]

    return jsonify({
        "success": True,
        "time_taken": time.time() - start_time,
        "result": history
    })
Example #4
def level_1_analysis(event, context):
    start_time = time.time()

    ticker = base64.b64decode(event['data']).decode('utf-8')
    raw_data = [doc.to_dict() for doc in get_raw_data(ticker)]
    datatypes = [
        "reddit_comment", "reddit_post", "tweet", "stocktwits_post",
        "yahoo_finance_comment"
    ]
    updated_fields = {}

    for dt in datatypes:
        sentiment = calculate_sentiment(raw_data, dt)
        mentions = calculate_mentions(raw_data, dt)
        updated_fields[dt + "_sentiment"] = sentiment
        updated_fields[dt + "_sentiment_timestamp"] = time.time()
        updated_fields[dt + "_mentions"] = mentions
        updated_fields[dt + "_mentions_timestamp"] = time.time()

        # update histories
        db().collection('tickers')\
            .document(ticker)\
            .collection('history')\
            .document(dt + '_mentions')\
            .set({
                "history": firestore.ArrayUnion([{
                    "timestamp": time.time(),
                    "data": mentions
                }])
            }, merge=True)
        db().collection('tickers')\
            .document(ticker)\
            .collection('history')\
            .document(dt + '_sentiment')\
            .set({
                "history": firestore.ArrayUnion([{
                    "timestamp": time.time(),
                    "data": sentiment
                }])
            }, merge=True)

    # finally we add the yahoo finance data
    yahoo_finance_fields = [
        "quoteType", "bid", "previousClose", "marketCap", "industry", "sector",
        "logo_url"
    ]
    info = yf.Ticker(ticker).info
    for field in yahoo_finance_fields:
        updated_fields[field] = info.get(field, "na")

    # update the main ticker document
    db().collection('tickers').document(ticker).set(updated_fields, merge=True)

    return jsonify({"success": True, "time_taken": time.time() - start_time})
Example #5
def calculate_rhi(ticker):
    ahi_history = db()\
        .collection('tickers')\
        .document(ticker)\
        .collection('history')\
        .document('AHI')\
        .get()\
        .to_dict()["history"]
    day_ago = time.time() - 3600 * 24
    week_ago = time.time() - 3600 * 24 * 7
    last_day = [dp["data"] for dp in ahi_history if dp["timestamp"] > day_ago]
    last_week = [
        dp["data"] for dp in ahi_history if dp["timestamp"] > week_ago
    ]

    if sum(last_day) == 0 or sum(last_week) == 0:
        return None
    # RHI is the ratio of the average AHI over the last day to the average over the last week
    day_average = sum(last_day) / len(last_day)
    week_average = sum(last_week) / len(last_week)
    return day_average / week_average
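The RHI is simply the last-day average of AHI divided by the last-week average. A minimal, Firestore-free sketch of the same arithmetic (the helper name and data points are illustrative):

import time

def rhi_from_history(ahi_history, now=None):
    # pure-Python mirror of calculate_rhi's arithmetic, without the Firestore read
    now = now if now is not None else time.time()
    last_day = [dp["data"] for dp in ahi_history if dp["timestamp"] > now - 86400]
    last_week = [dp["data"] for dp in ahi_history if dp["timestamp"] > now - 7 * 86400]
    if sum(last_day) == 0 or sum(last_week) == 0:
        return None
    return (sum(last_day) / len(last_day)) / (sum(last_week) / len(last_week))

now = time.time()
print(rhi_from_history([
    {"timestamp": now - 3600, "data": 4.0},        # 1 hour ago
    {"timestamp": now - 6 * 3600, "data": 2.0},    # 6 hours ago
    {"timestamp": now - 3 * 86400, "data": 1.0},   # 3 days ago
]))  # (4 + 2) / 2 divided by (4 + 2 + 1) / 3 ≈ 1.29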
Example #6
def level_2_analysis(event, context):
    start_time = time.time()
    ticker = base64.b64decode(event['data']).decode('utf-8')
    updated_fields = {}
    document = db().collection('tickers').document(ticker).get().to_dict()

    updated_fields["previous_AHI"] = document["AHI"]
    updated_fields["previous_sentiment"] = document["sentiment"]

    ahi = calculate_ahi(document)
    sentiment = calculate_sentiment(document)
    updated_fields["AHI"] = ahi
    updated_fields["AHI_timestamp"] = time.time()
    updated_fields["sentiment"] = sentiment
    updated_fields["sentiment_timestamp"] = time.time()
    db().collection('tickers')\
        .document(ticker)\
        .collection('history')\
        .document('AHI')\
        .set({
            "history": firestore.ArrayUnion([{
                "timestamp": time.time(),
                "data": ahi
            }])
        }, merge=True)
    db().collection('tickers')\
        .document(ticker)\
        .collection('history')\
        .document('sentiment')\
        .set({
            "history": firestore.ArrayUnion([{
                "timestamp": time.time(),
                "data": sentiment
            }])
        }, merge=True)

    # these two functions must run after the AHI and sentiment history uploads above,
    # since they read the freshly written history documents back from Firestore
    rhi = calculate_rhi(ticker)
    sgp = calculate_sgp(ticker)
    updated_fields["RHI"] = rhi
    updated_fields["RHI_timestamp"] = time.time()
    updated_fields["SGP"] = sgp
    updated_fields["SGP_timestamp"] = time.time()
    db().collection('tickers')\
        .document(ticker)\
        .collection('history')\
        .document('RHI')\
        .set({
            "history": firestore.ArrayUnion([{
                "timestamp": time.time(),
                "data": rhi
            }])
        }, merge=True)
    db().collection('tickers')\
        .document(ticker)\
        .collection('history')\
        .document('SGP')\
        .set({
            "history": firestore.ArrayUnion([{
                "timestamp": time.time(),
                "data": sgp
            }])
        }, merge=True)

    db().collection('tickers').document(ticker).set(updated_fields, merge=True)

    return jsonify({"success": True, "time_taken": time.time() - start_time})
Example #7
import time
from firebase_db import db

ahi_targets = db().collection('targets').document('ahi').get().to_dict()

datatypes = [
    "tweet", "reddit_comment", "reddit_post", "stocktwits_post",
    "yahoo_finance_comment"
]


def calculate_ahi(document):
    ahi = 0
    total_weight = 0
    for dt in datatypes:
        mentions = document.get(dt + "_mentions", 0)
        weight = ahi_targets.get(dt + "_weight", 0)
        benchmark = ahi_targets.get(dt + "_benchmark")
        if not benchmark:
            print(f"no ahi benchmark found for datatype {dt}")
            continue  # skip this datatype rather than dividing by a missing benchmark
        ahi += weight * mentions / benchmark
        total_weight += weight
    if total_weight != 0:
        return ahi / total_weight
    else:
        return 0
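
# Worked example of the weighted average above, with made-up weights and benchmarks
# (the real values live in the 'targets/ahi' Firestore document):
#   weights    = {"tweet": 2, "reddit_post": 1}
#   benchmarks = {"tweet": 100, "reddit_post": 50}
#   mentions   = {"tweet": 300, "reddit_post": 25}
#   AHI = (2 * 300/100 + 1 * 25/50) / (2 + 1) = 6.5 / 3 ≈ 2.17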


def calculate_sentiment(document):
    total_sentiment = 0
    total_weight = 0