Code example #1
def generate_report():
    # let's examine statistics for the last 3 weeks across 3 large indices (Nasdaq, S&P 500, DJIA)
    for_date = date.today().toordinal()
    spy = HistoricAlphaVantageAPI().get_data_window('SPY', for_date, 15)
    dow = HistoricAlphaVantageAPI().get_data_window('DOW', for_date, 15)
    qqq = HistoricAlphaVantageAPI().get_data_window('QQQ', for_date, 15)

    print(spy)
    print(dow)
    print(qqq)

    net_change = []
    dates = []
    sentiment = []
    for index in range(len(spy)):
        dates.append(str(date.fromordinal(spy[index]['date'])))
        net_change.append(get_net_change(spy[index], dow[index], qqq[index]))

        dd = DailyDiscussion().get_daily_discussion(spy[index]['date'])
        sentiment.append(
            SentimentAnalyzer.get_average_discussion_sentiment(dd))

    print(sentiment)
    # get the average sentiment ratio over the time period for normalization
    ratio_avg = sum([sent[2] for sent in sentiment]) / len(sentiment)

    # reverse so the series run in chronological order
    dates.reverse()
    sentiment.reverse()
    net_change.reverse()

    ds = DataSet()
    ds.set_x(dates)
    ds.set_y1_name('Index % Change')
    ds.append_y_set({'data': net_change, 'label': 'Major Indices Avg Change'})
    ds.set_y2_name('Sentiment Ratio')
    ds.append_secondary_axis_y_set({
        'data': [sent[2] for sent in sentiment],
        'label': 'WSB Sentiment'
    })
    ds.append_secondary_axis_y_set({
        'data': [ratio_avg] * len(dates),
        'label': 'Average Sentiment'
    })

    chart = ChartBuilder(title='Major Index Change vs Sentiment',
                         data_set=ds,
                         x_label='Date',
                         chart_type='line')

    # Build the actual report from our parsed data
    report = Reporter()
    report.set_title('DD Sentiment Report')
    report.set_body(chart)
    report.compile()
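The get_net_change helper called above is not shown in this snippet. A minimal sketch of one plausible implementation, assuming each data-window entry carries 'open' and 'close' prices and that the three indices are simply averaged (the field names and the averaging are assumptions, not confirmed by the source):

def get_net_change(spy_day, dow_day, qqq_day):
    # average the daily percent change across the three index entries
    # ('open'/'close' field names are assumed for illustration)
    changes = [(day['close'] - day['open']) / day['open'] * 100
               for day in (spy_day, dow_day, qqq_day)]
    return sum(changes) / len(changes)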
Code example #2
def generate_report():
    # Build the actual report from our parsed data
    report = Reporter()
    report.set_title('Historic Sensitivity Report')

    # set up a table; table_values is assumed to be built earlier in this module
    table_header = ['Ticker', 'Sentiment', '10 day Result']
    report.set_body(TableBuilder(headers=table_header, rows=table_values))

    report.compile()

    return report.title
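The table_values list used above is built elsewhere. A minimal sketch of how rows matching the ['Ticker', 'Sentiment', '10 day Result'] headers might be assembled, assuming a hypothetical results iterable of (ticker, sentiment, ten_day_change) tuples; this is illustrative only, not part of the project:

table_values = []
for ticker, sentiment, ten_day_change in results:  # results is a hypothetical input
    # one row per ticker, in the same order as the table headers
    table_values.append([ticker, round(sentiment, 3), str(ten_day_change) + '%'])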
Code example #3
from api.local_stocks.Ticker import Ticker
from client.util.html.ListBuilder import ListBuilder
from client.Reporter import Reporter

t = Ticker()
stocks = t.get_n_stocks(n=10)
values = []

for stock in stocks:
    values.append(stock[0] + ' - ' + stock[1])

r = Reporter()
r.set_title('Sample Stock List Report')
r.set_body(ListBuilder(values, list_header="Stock tickers").compile())
r.compile()
Code example #4
from api.local_stocks.Ticker import Ticker
from client.util.html.TableBuilder import TableBuilder
from client.Reporter import Reporter

t = Ticker()
acb = t.get_stock('ACB')
hexo = t.get_stock('HEXO')
tlry = t.get_stock('TLRY')
values = [acb[:3], hexo[:3], tlry[:3]]

r = Reporter()
r.set_title('Sample Stock Report')
r.set_body(
    TableBuilder(['Ticker', 'Company Name', 'Description'], values).compile())
r.compile()
Code example #5
# imports for this sample script (the ChartBuilder import path is assumed,
# mirroring the other builders under client.util.html)
from api.local_stocks.Ticker import Ticker
from client.Reporter import Reporter
from client.util.html.ChartBuilder import ChartBuilder

t = Ticker()
stocks = t.get_n_stocks(n=10)

count = 0
values = []
for stock in stocks:
    values.append({
        'label': '$' + stock[0],
        'data': [count, count + 5, count + 10, count + 5, count]
    })

    count += 1

data = {'ys': values}

r = Reporter()
r.set_title('Sample Chart Report')
r.set_body(
    ChartBuilder(chart_type='line', title='Sample Line Chart', data_set=data))

bar_data = {'ys': data['ys'][:3]}
r.append_to_body(
    ChartBuilder(chart_type='bar', title='Sample Bar Chart',
                 data_set=bar_data))

# Note that pie charts require x labels for their data and can only accept one
# data set; otherwise the chart renders unpredictably
pie_data = {
    'x': ['One', 'Two', 'Three', 'Four', 'Five'],
    'ys': data['ys'][2:3]
}
r.append_to_body(
    # chart type string and title below are assumed, following the pattern above
    ChartBuilder(chart_type='pie', title='Sample Pie Chart',
                 data_set=pie_data))
r.compile()
Code example #6
    table_values.append([
        ticker, tickers_found[ticker]['count'], tickers_found[ticker]['name'],
        desc[:200] + '...', addendum
    ])

table_values.sort(key=sort_by_mentions, reverse=True)

# and then mutate the data again to match the data format for bar charts
x = []
y = []
for arr in table_values:
    x.append(arr[0])
    y.append(arr[1])

ds = DataSet()
ds.set_x(x)
ds.append_y_set({'data': y, 'label': ''})

# Build the actual report from our parsed data
report = Reporter()
report.set_title('Sample Wall Street Bets Report')

# set up a table
table_header = ['Ticker', 'Mentions', 'Name', 'Description', 'Links']
report.set_body(TableBuilder(headers=table_header, rows=table_values))

# and a chart
report.append_to_body(
    ChartBuilder(title='WSB Mentions', chart_type='bar', data_set=ds))
report.compile()
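The sort_by_mentions key used above is not shown in this snippet. A minimal sketch, assuming it keys each table row on the mention count stored in its second column:

def sort_by_mentions(row):
    # each row is [ticker, count, name, description, links]
    return row[1]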
Code example #7
def generate_report():
    dd_object = DailyDiscussion()
    alpha_vantage = AlphaVantageAPI()
    tickers = dd_object.get_daily_discussion(for_date=dt.date.today())
    temp_dict = {
        k: v['count']
        for (k, v) in tickers[0].get('tickers').items() if k != 'misc'
    }
    c = collections.Counter(temp_dict)
    report = Reporter()
    report.set_title('Markov Chain Report')
    for tuple_c in c.most_common(20):
        ticker = tuple_c[0]
        mc = MarkovChain(ticker)
        mc.build_markov_chain()
        x, y = mc.predict()
        if x is None or y is None:
            continue
        mu = mc.expected_value(x, y)
        var = mc.var(x, y)
        prev_day_change = alpha_vantage.get_quote(ticker)
        prev_day_change = prev_day_change['Global Quote']['10. change percent']
        ds = DataSet()
        ds.set_x(x.tolist())
        ds.append_y_set({'data': y.tolist(), 'label': ''})
        report.append_to_body(
            ChartBuilder(title='Markov Chain: ' + ticker,
                         chart_type='line',
                         data_set=ds,
                         y_label=ticker))
        report.append_to_body(
            HTMLUtil.wrap_in_tag(
                "Expected Value: " + str(round(mu, 3)) + "%", 'p'))
        report.append_to_body(
            HTMLUtil.wrap_in_tag("Variance: " + str(round(var, 3)) + "%",
                                 'p'))
        report.append_to_body(
            HTMLUtil.wrap_in_tag("Previous day's change: " + prev_day_change,
                                 'p'))
    report.compile()
    return report.title
Code example #8
def generate_report(table_date=None):
    reddit = RedditAPI()
    epochs = reddit.get_new('wallstreetbets', limit=1000, cast_to='epoch')

    epoch_meta = {}
    most_recent_date = None
    epoch_count = 0
    for epoch in epochs:
        epoch_count += 1
        if most_recent_date is None:
            most_recent_date = epoch

        epoch_data = epochs[epoch]

        if epoch == table_date:
            print(epoch_data)

        tickers_found = {}
        total_posts = 0
        for sub in epoch_data:
            total_posts += 1
            ticker = NLUSubjectTickerEstimator.estimate(
                sub['title'], sub['body'])

            if ticker is None:
                continue

            ticker_symbol = ticker[0]
            url = sub['url']
            title = sub['title']
            score = sub['score']

            # record information about the ticker and how many times we've seen it
            if ticker_symbol in tickers_found:
                tickers_found[ticker_symbol]['count'] += 1
                tickers_found[ticker_symbol]['submissions'].append({
                    'link':
                    url,
                    'title':
                    title,
                    'score':
                    score
                })
            else:
                tickers_found[ticker_symbol] = {
                    'count': 1,
                    'submissions': [{
                        'link': url,
                        'title': title,
                        'score': score
                    }],
                    'name': LinkBuilder(ticker[1], ticker[-3]),
                    'description': ticker[2]
                }

        # sort each ticker's submissions by score
        for tf in tickers_found:
            tickers_found[tf]['submissions'].sort(reverse=True,
                                                  key=Sorting.sort_by_score)

        tickers_found = {
            k: v
            for k, v in sorted(tickers_found.items(),
                               key=lambda it: it[1]['count'],
                               reverse=True)
        }

        epoch_meta[epoch] = {
            'tickers': tickers_found,
            'post_count': total_posts
        }

    table = ''
    print(epoch_meta)
    x = []
    y_dict = {}
    ds = DataSet()
    epoch_counter = 0
    for meta in epoch_meta:
        epoch_counter += 1
        metadata = epoch_meta[meta]

        # if you specify a date, allow generating a table for dates in the past
        if table_date is None and meta == most_recent_date:
            table = AlbinsonianHTML.get_ticker_table(
                metadata['tickers'], force_reload=FORCE_CACHE_RELOAD)
        elif table_date is not None and table_date == meta:
            table = AlbinsonianHTML.get_ticker_table(
                metadata['tickers'], force_reload=FORCE_CACHE_RELOAD)

        x.append(meta)
        for ticker in metadata['tickers']:
            ticker_meta = metadata['tickers'][ticker]

            if ticker in y_dict:
                y_dict[ticker].append(ticker_meta['count'])
            else:
                # back-fill zeros for the epochs before this ticker first appeared
                y_dict[ticker] = [0] * (epoch_counter - 1)
                y_dict[ticker].append(ticker_meta['count'])

        for ticker in y_dict:
            if len(y_dict[ticker]) < epoch_counter:
                y_dict[ticker].append(0)

    x.reverse()
    ds.set_x(x)
    for ticker_ds in y_dict:
        ticker_data = y_dict[ticker_ds]
        if len(ticker_data) < epoch_count or sum(ticker_data) < TRIM_THRESH:
            continue

        ticker_data.reverse()
        ds.append_y_set({"label": ticker_ds, "data": ticker_data})

    report = Reporter()
    report.set_title(REPORT_TITLE)
    report.append_to_body(table, section_id='ticker-table')
    report.append_to_body(
        ChartBuilder(title="Mentions over Time",
                     data_set=ds,
                     chart_type='line'))
    report.compile()

    return report.title
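Sorting.sort_by_score, used to order submissions here and in the next example, is likewise not shown. A minimal sketch, assuming it keys each submission dict on its Reddit score:

def sort_by_score(submission):
    # each submission is a dict like {'link': ..., 'title': ..., 'score': ...}
    return submission['score']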
Code example #9
def generate_report():
    # gather the current hot 1000 posts from WSB
    reddit = RedditAPI()
    submissions = reddit.get_hot('wallstreetbets', limit=1000)

    # loop through and try to pick out tickers
    tickers_found = {}
    for sub in range(0, len(submissions['title'])):
        title = submissions['title'][sub]
        self_text = submissions['body'][sub]
        url = submissions['url'][sub]
        score = submissions['score'][sub]

        ticker = NLUSubjectTickerEstimator.estimate(title, self_text)

        if ticker is None:
            continue

        ticker_symbol = ticker[0]

        # record information about the ticker and how many times we've seen it
        if ticker_symbol in tickers_found:
            tickers_found[ticker_symbol]['count'] += 1
            tickers_found[ticker_symbol]['submissions'].append({
                'link': url,
                'title': title,
                'score': score
            })
        else:
            tickers_found[ticker_symbol] = {
                'count': 1,
                'submissions': [{
                    'link': url,
                    'title': title,
                    'score': score
                }],
                'name': LinkBuilder(ticker[1], ticker[-3]),
                'description': ticker[2]
            }

    # sort the submissions by score
    for tf in tickers_found:
        tickers_found[tf]['submissions'].sort(reverse=True,
                                              key=Sorting.sort_by_score)

    # then reformat the result so that we can put it in a tabular format
    table_values = []
    horizontal_x = []
    horizontal_y = []
    mentions_to_movement = []
    for tf in tickers_found:
        addendum = ''
        counter = 0
        for submission in tickers_found[tf]['submissions']:
            addendum += LinkBuilder('[%d] - %d' % (counter, submission['score']),
                                    'https://reddit.com' + submission['link']).compile() + \
                        '<br />'
            counter += 1

        addendum = ScrollableDiv(addendum, '5rem').compile()

        desc = '...'
        if 'description' in tickers_found[tf] and tickers_found[tf][
                'description'] is not None:
            desc = tickers_found[tf]['description']

        if tickers_found[tf]['count'] > 2:
            print('crawling AV for %s' % tf)
            pct_change = AlphaVantageAPI().get_parsed_quote(tf).get(
                '10. change percent')
            if pct_change is None:
                pct_change = "0%"

            horizontal_y.append(pct_change.replace('%', ''))

            horizontal_x.append(tf)
            mentions_to_movement.append({
                'data': [{
                    'x': tickers_found[tf]['count'],
                    'y': pct_change.replace('%', '')
                }],
                'label':
                tf
            })

            pct_in_tag = HTMLUtil.wrap_in_tag(
                pct_change,
                'div',
                attributes={
                    'class': 'negative' if '-' in pct_change else 'positive'
                })
        else:
            pct_in_tag = 'N/A'

        table_values.append([
            tf, tickers_found[tf]['count'], tickers_found[tf]['name'],
            desc[:200] + '...', pct_in_tag, addendum
        ])

    horizontal_ds = DataSet()
    horizontal_ds.set_x(horizontal_x)
    horizontal_ds.append_y_set({'label': 'Sample', 'data': horizontal_y})

    table_values.sort(key=Sorting.sort_by_mentions, reverse=True)

    # and then mutate the data again to match the data format for bar charts
    x = []
    y = []
    for arr in table_values:
        x.append(arr[0])
        y.append(arr[1])

    ds = DataSet()
    ds.set_x(x)
    ds.append_y_set({'data': y, 'label': ''})

    # Build the actual report from our parsed data
    report = Reporter()
    report.set_title('NLU Report')

    # set up a table
    table_header = [
        'Ticker', 'Mentions', 'Name', 'Description', 'Movement', 'Links'
    ]
    report.set_body(TableBuilder(headers=table_header, rows=table_values))

    # and a chart
    report.append_to_body(
        ChartBuilder(title='WSB Mentions',
                     chart_type='bar',
                     data_set=ds,
                     y_label='# of Mentions'))

    # and another chart
    report.append_to_body(
        ChartBuilder(title='Stock % Change',
                     chart_type='horizontal-bar',
                     data_set=horizontal_ds,
                     x_label='% Change'))

    # and another
    ds_scatter = DataSet()
    ds_scatter.set_ys(mentions_to_movement)
    report.append_to_body(
        ChartBuilder(title='Mentions vs Movement',
                     chart_type='scatter',
                     data_set=ds_scatter,
                     x_label='Mentions',
                     y_label='Movement (%)'))
    report.compile()

    return report.title