def generate_report():
    """Build the Markov-chain report for today's daily-discussion tickers.

    Pulls today's discussion, counts ticker mentions (excluding 'misc'),
    runs a Markov-chain prediction for the top 20, and compiles a report
    with one chart plus summary stats per ticker.

    :return: the compiled report's title.
    """
    discussion = DailyDiscussion()
    av_client = AlphaVantageAPI()
    tickers = discussion.get_daily_discussion(for_date=dt.date.today())

    # Tally mention counts, skipping the 'misc' bucket.
    mention_counts = collections.Counter(
        {sym: info['count']
         for sym, info in tickers[0].get('tickers').items()
         if sym != 'misc'})

    report = Reporter()
    report.set_title('Markov Chain Report')

    for sym, _ in mention_counts.most_common(20):
        chain = MarkovChain(sym)
        chain.build_markov_chain()
        x, y = chain.predict()
        if x is None or y is None:
            # No prediction available for this ticker; skip it.
            continue
        mu = chain.expected_value(x, y)
        var = chain.var(x, y)
        quote = av_client.get_quote(sym)
        prev_day_change = quote['Global Quote']['10. change percent']

        ds = DataSet()
        ds.set_x(x.tolist())
        ds.append_y_set({'data': y.tolist(), 'label': ''})

        report.append_to_body(ChartBuilder(title='Markov Chain: ' + sym,
                                           chart_type='line',
                                           data_set=ds,
                                           y_label=sym))
        report.append_to_body(HTMLUtil.wrap_in_tag(
            "Expected Value: " + str(round(mu, 3)) + "%", 'p'))
        report.append_to_body(HTMLUtil.wrap_in_tag(
            "Variance: " + str(round(var, 3)) + "%", 'p'))
        report.append_to_body(HTMLUtil.wrap_in_tag(
            "Previous day's change: " + prev_day_change, 'p'))

    report.compile()
    return report.title
def compile(self):
    """Compile the chart: data string, script block, then the final HTML.

    :return: the compiled HTML (canvas element followed by its script).
    """
    self.compile_data()
    self.build_script()
    indent = HTMLUtil.get_indent(1)
    self._compiled_html = indent + self._base_html + indent + self._compiled_script
    return self._compiled_html
def _compile_template(self):
    """Fill the report template with the title, summary, and body markup.

    :return: the template string with all placeholders substituted.
    """
    if self.body == '':
        # Nothing was appended; render a placeholder section instead.
        self.body = '\n\t\t<section>\n\t\t\t<p>Empty Report</p>\n\t\t</section>'
    if self.summary != '':
        self.summary = HTMLUtil.wrap_in_tag(self.summary, 'p') + '\n'
    self.body = HTMLUtil.wrap_in_tag(self.body, 'main', indent=1, one_line=False)
    compiled = self.template.replace('$$__TITLE__$$', self.title, 2)
    compiled = compiled.replace('$$__BODY__$$', self.summary + str(self.body))
    return compiled.replace('$$__HEAD__$$', '')
def render(self, indent=1):
    """Render this element and its children to an HTML string.

    :param indent: indentation depth passed to the HTML wrapper.
    :return: the rendered markup for this node's subtree.
    """
    if len(self._class_list) > 0:
        self._attrs[Attributes.CLASS] = ' '.join(self._class_list)
    if len(self._children) == 0:
        # Leaf node: wrap the raw text on a single line.
        return HTMLUtil.wrap_in_tag(self._text, self._node_type, indent,
                                    self._attrs, one_line=True)
    # Recursively render element children; stringify anything else.
    child_markup = ''.join(
        child.render(indent=indent + 1) if isinstance(child, HTMLElement)
        else str(child)
        for child in self._children)
    return HTMLUtil.wrap_in_tag(child_markup, self._node_type, indent,
                                self._attrs, one_line=False)
def get_ticker_table(tickers_found, lookup_thresh=3, force_reload=False):
    """Build a TableBuilder summarizing ticker mentions.

    FIX: removed the `norm_factor` accumulation loop — it summed every
    ticker's count but the result was never read (dead code).

    :param tickers_found: dict of ticker symbol -> info dict with 'count',
        'submissions', 'name', and optionally 'description'.
    :param lookup_thresh: tickers with count >= lookup_thresh - 1 get a live
        AlphaVantage quote lookup; others show 'N/A'.
    :param force_reload: forwarded to the AlphaVantage client to bypass cache.
    :return: a TableBuilder with one row per ticker, sorted by mentions.
    """
    table_header = ['Ticker', 'Mentions', 'Name', 'Description', 'Movement', 'Links']
    table_values = []
    for tf in tickers_found:
        info = tickers_found[tf]

        # Scrollable list of links back to the source submissions.
        addendum = ''
        for counter, submission in enumerate(info['submissions']):
            addendum += LinkBuilder(
                '[%d] - %d' % (counter, submission['score']),
                'https://www.reddit.com' + submission['link']).compile() + '<br />'
        addendum = ScrollableDiv(addendum, '5rem').compile()

        desc = '...'
        if info.get('description') is not None:
            desc = info['description']

        if info['count'] >= lookup_thresh - 1:
            # Only hit the quote API for tickers with enough mentions.
            print('crawling AV for %s' % tf)
            pct_change = AlphaVantageAPI().get_parsed_quote(
                tf, force_reload)['10. change percent']
            css_class = 'negative' if '-' in pct_change else 'positive'
            pct_in_tag = HTMLUtil.wrap_in_tag(pct_change, 'div',
                                              attributes={'class': css_class})
        else:
            pct_in_tag = 'N/A'

        table_values.append([tf, info['count'], info['name'],
                             desc[:200] + '...', pct_in_tag, addendum])

    table_values.sort(key=Sorting.sort_by_mentions, reverse=True)
    return TableBuilder(headers=table_header, rows=table_values)
def __init__(self, chart_type='bar', title='', data_set=None, x_label='', y_label=''):
    """Initialize a chart.

    FIX: `title.replace(' ', '_', 100)` capped the replacement at 100
    occurrences for no reason; a title with more spaces would produce an
    element id still containing spaces. Replace all occurrences.

    :param chart_type: Chart.js chart type (e.g. 'bar', 'line').
    :param title: chart title; also used (spaces -> underscores) as the
        canvas element id.
    :param data_set: a DataSet, a raw data dict, or None for an empty chart.
    :param x_label: x-axis label.
    :param y_label: y-axis label.
    """
    if data_set is None:
        # No data supplied: build an empty placeholder set.
        data_set = DataSet()
        data_set.set_x([])
        data_set.set_ys([{'label': 'no data', 'data': []}])
    elif isinstance(data_set, dict):
        data_set = DataSet(from_data=data_set)
    self._raw_data_set = data_set.get_data_dict()
    self._ys = self._raw_data_set['ys']
    self._x = self._raw_data_set['x']
    self._type = chart_type
    self._title = title
    # HTML ids must not contain spaces.
    self._id = title.replace(' ', '_')
    self._compiled_html = ''
    self._compiled_script = ''
    self._data_string = ''
    self._base_html = HTMLUtil.wrap_in_tag('', 'canvas', indent=3,
                                           attributes={'id': self._id})
    self._color_index = -1
    self._x_label = x_label
    self._y_label = y_label
def resolve_options(opts):
    """Serialize an options dict into a JS object-body fragment.

    :param opts: mapping of option name -> value.
    :return: concatenated '"key": value,' entries, each followed by indent.
    """
    parts = []
    for key in opts:
        parts.append('"' + key + '": '
                     + BaseChart.resolve_value(opts[key])
                     + ',' + HTMLUtil.get_indent(6))
    return ''.join(parts)
def index():
    """Render the landing page: compiled-report links plus generator buttons."""
    report_path = path.join(client_path, 'compiled')
    # Collect every compiled report file, skipping the README, in sorted order.
    report_files = sorted(
        f for f in listdir(report_path)
        if path.isfile(path.join(report_path, f)) and f != "README.md")
    file_links = [
        LinkBuilder(text=f.replace('.html', ''), url='/report?name=' + f)
        for f in report_files
    ]
    file_list = ListBuilder(list_items=file_links, list_header='Your Reports')
    buttons = [
        ButtonBuilder(text='New ' + b + ' Report', button_id=b,
                      attrs={"data-type": b})
        for b in valid_report_types
    ]
    button_list = ListBuilder(list_items=buttons, list_header=' Generate Reports')
    template = HTMLUtil.get_template('index.html')
    template = template.replace('$$__REPORTS__$$', file_list.compile())
    return template.replace('$$__GEN_REPORTS__$$', button_list.compile())
def compile_data(self):
    """Compile primary and secondary y-series into the chart data string.

    Primary series are bound to the 'A' axis, secondary series to 'B'.
    """
    self._data_string = ''
    series_groups = (
        (self._raw_data_set['ys'], "'A'"),
        (self._raw_data_set['secondary_ys'], "'B'"),
    )
    for series_list, axis_id in series_groups:
        for series in series_list:
            color = self.resolve_data_color(series)
            entry = self.get_data_set_template()
            entry = entry.replace(TITLE_FORMAT, series['label'])
            entry = entry.replace(DATA_FORMAT, str(series['data']))
            entry = entry.replace(COLOR_PATTERN, color, 2)
            entry = entry.replace(Y_AX_ID, axis_id)
            self._data_string += HTMLUtil.get_indent(7) + entry
    # Drop the final character (trailing separator from the last entry).
    self._data_string = self._data_string[:-1]
def compile_data(self):
    """Compile each y-series into the chart's data string."""
    compiled = ''
    for series in self._ys:
        color = self.resolve_data_color(series)
        entry = self.get_data_set_template()
        entry = entry.replace(TITLE_FORMAT, series['label'])
        entry = entry.replace(DATA_FORMAT, str(series['data']))
        entry = entry.replace(COLOR_PATTERN, color, 2)
        compiled += HTMLUtil.get_indent(7) + entry
    # Drop the final character (trailing separator from the last entry).
    self._data_string = compiled[:-1]
def compile_data(self):
    """Compile each y-series into the data string, generating per-point colors.

    Series may carry an explicit 'color'; otherwise one color is generated
    for every data point.
    """
    self._data_string = ''
    for series in self._ys:
        if 'color' in series:
            color = series['color']
        else:
            # One generated color per data point, rendered without quotes.
            color = str([self.get_next_color()
                         for _ in range(0, len(series['data']))]).replace("'", "", 100)
        entry = DATA_SET_TEMPLATE.replace(TITLE_FORMAT, series['label'])
        entry = entry.replace(DATA_FORMAT, str(series['data']))
        entry = entry.replace(COLOR_FORMAT, str(color))
        self._data_string += HTMLUtil.get_indent(7) + entry
def append_to_body(self, html, section_id=""):
    """Append *html* to the report body, wrapped in a <section> tag.

    :param html: markup (or an object whose str() is markup) to append.
    :param section_id: optional id attribute for the section element.
    """
    wrap_kwargs = {'indent': 2, 'one_line': False}
    if section_id != "":
        wrap_kwargs['attributes'] = {"id": section_id}
    self.body += '\n\t\t<br />' + HTMLUtil.wrap_in_tag(html, 'section', **wrap_kwargs)
def set_body(self, body):
    """Replace the report body with *body* wrapped in a <section> tag."""
    wrapped = HTMLUtil.wrap_in_tag(body, 'section', indent=2, one_line=False)
    self.body = wrapped
def report():
    """Serve the compiled report selected via the ?name= query parameter."""
    requested = request.args.get('name', default='NOT_FOUND', type=str)
    return HTMLUtil.get_report(requested)
def generate_report():
    """Scrape r/wallstreetbets hot posts, estimate mentioned tickers, and
    compile the NLU report (summary table plus three charts).

    FIX: the AlphaVantage quote key was a string literal broken across a
    line break ('10. \\nchange percent') — a syntax/correctness defect; the
    correct key is '10. change percent' (matching its use elsewhere in the
    project).

    :return: the compiled report's title.
    """
    # gather the current hot 1000 posts from WSB
    reddit = RedditAPI()
    submissions = reddit.get_hot('wallstreetbets', limit=1000)

    # loop through and try to pick out tickers
    tickers_found = {}
    for sub in range(0, len(submissions['title'])):
        title = submissions['title'][sub]
        self_text = submissions['body'][sub]
        url = submissions['url'][sub]
        score = submissions['score'][sub]
        ticker = NLUSubjectTickerEstimator.estimate(title, self_text)
        if ticker is None:
            continue
        ticker_symbol = ticker[0]
        # record the ticker and how many times we've seen it
        if ticker_symbol in tickers_found:
            tickers_found[ticker_symbol]['count'] += 1
            tickers_found[ticker_symbol]['submissions'].append({
                'link': url,
                'title': title,
                'score': score
            })
        else:
            tickers_found[ticker_symbol] = {
                'count': 1,
                'submissions': [{'link': url, 'title': title, 'score': score}],
                # NOTE(review): ticker[-3] as the link URL looks fragile —
                # confirm the estimator's tuple layout.
                'name': LinkBuilder(ticker[1], ticker[-3]),
                'description': ticker[2]
            }

    # sort the submissions by score
    for tf in tickers_found:
        tickers_found[tf]['submissions'].sort(reverse=True,
                                              key=Sorting.sort_by_score)

    # then reformat the result so that we can put it in a tabular format
    table_values = []
    horizontal_x = []
    horizontal_y = []
    mentions_to_movement = []
    for tf in tickers_found:
        # Scrollable list of links back to the source submissions.
        addendum = ''
        for counter, submission in enumerate(tickers_found[tf]['submissions']):
            addendum += LinkBuilder(
                '[%d] - %d' % (counter, submission['score']),
                'https://reddit.com' + submission['link']).compile() + '<br />'
        addendum = ScrollableDiv(addendum, '5rem').compile()

        desc = '...'
        if tickers_found[tf].get('description') is not None:
            desc = tickers_found[tf]['description']

        if tickers_found[tf]['count'] > 2:
            # Only hit the quote API for tickers with enough mentions.
            print('crawling AV for %s' % tf)
            pct_change = AlphaVantageAPI().get_parsed_quote(tf).get(
                '10. change percent')
            if pct_change is None:
                pct_change = "0%"
            horizontal_y.append(pct_change.replace('%', ''))
            horizontal_x.append(tf)
            mentions_to_movement.append({
                'data': [{
                    'x': tickers_found[tf]['count'],
                    'y': pct_change.replace('%', '')
                }],
                'label': tf
            })
            pct_in_tag = HTMLUtil.wrap_in_tag(
                pct_change, 'div',
                attributes={'class': 'negative' if '-' in pct_change else 'positive'})
        else:
            pct_in_tag = 'N/A'

        table_values.append([
            tf, tickers_found[tf]['count'], tickers_found[tf]['name'],
            desc[:200] + '...', pct_in_tag, addendum
        ])

    horizontal_ds = DataSet()
    horizontal_ds.set_x(horizontal_x)
    horizontal_ds.append_y_set({'label': 'Sample', 'data': horizontal_y})

    table_values.sort(key=Sorting.sort_by_mentions, reverse=True)

    # and then mutate the data again to match the data format for bar charts
    ds = DataSet()
    ds.set_x([row[0] for row in table_values])
    ds.append_y_set({'data': [row[1] for row in table_values], 'label': ''})

    # Build the actual report from our parsed data
    report = Reporter()
    report.set_title('NLU Report')
    # set up a table
    table_header = [
        'Ticker', 'Mentions', 'Name', 'Description', 'Movement', 'Links'
    ]
    report.set_body(TableBuilder(headers=table_header, rows=table_values))
    # and a chart
    report.append_to_body(
        ChartBuilder(title='WSB Mentions', chart_type='bar', data_set=ds,
                     y_label='# of Mentions'))
    # and another chart
    report.append_to_body(
        ChartBuilder(title='Stock % Change', chart_type='horizontal-bar',
                     data_set=horizontal_ds, x_label='% Change'))
    # and another
    ds_scatter = DataSet()
    ds_scatter.set_ys(mentions_to_movement)
    report.append_to_body(
        ChartBuilder(title='Mentions vs Movement', chart_type='scatter',
                     data_set=ds_scatter, x_label='Mentions',
                     y_label='Movement (%)'))
    report.compile()
    return report.title
def get_stock_search():
    """Return the stock-search page template."""
    template = HTMLUtil.get_template('stock-search.html')
    return template