def main():
    """Interactively review and store PE / market-cap figures for KMX fundamentals since 2000."""
    log.info("Started...")
    symbol = "KMX"
    from_date = date(2000, 1, 1)
    db = database2()
    rows = db.get_fundamentals(symbol, from_date)
    h = historicals(get_market_data_symbol(symbol))
    for row in rows:
        close = h.close(row.date)
        high = h.year_high(row.date)
        low = h.year_low(row.date)
        # Ratios rounded to whole numbers for display and storage
        pe = round(close / row.eps)
        pe_high = round(high / row.eps)
        pe_low = round(low / row.eps)
        mkt_cap = round(row.shrs_out * close)
        print("%s %s PE: %d %f / %f PE High: %d %f / %f PE Low: %d %f / %f Mkt Cap: %d %f * %f"
              % (row.date, row.symbol, pe, close, row.eps, pe_high, high, row.eps,
                 pe_low, low, row.eps, mkt_cap, row.shrs_out, close))
        action = input("Please input y to commit, n to skip, a to abort:")
        if action == "a":
            break
        if action == "y":
            row.pe = pe
            row.pe_high = pe_high
            row.pe_low = pe_low
            row.mkt_cap = mkt_cap
            db.commit()
        # any other answer skips this row
    log.info("Completed")
def main():
    """Interactively review and store PE / market-cap figures for KMX fundamentals since 2017."""
    log.info("Started...")
    symbol = "KMX"
    from_date = date(2017, 1, 1)
    db = database2()
    rows = db.get_fundamentals(symbol, from_date)
    h = historicals(get_market_data_symbol(symbol))
    for row in rows:
        close, high, low = h.close(row.date), h.year_high(row.date), h.year_low(row.date)
        pe, pe_high, pe_low = round(close / row.eps), round(high / row.eps), round(low / row.eps)
        mkt_cap = round(row.shrs_out * close)
        print("%s %s PE: %d %f / %f PE High: %d %f / %f PE Low: %d %f / %f Mkt Cap: %d %f * %f"
              % (row.date, row.symbol, pe, close, row.eps, pe_high, high, row.eps,
                 pe_low, low, row.eps, mkt_cap, row.shrs_out, close))
        action = input("Please input y to commit, n to skip, a to abort:")
        if action == "y":
            # Persist the computed ratios on this fundamentals row
            row.pe, row.pe_high, row.pe_low, row.mkt_cap = pe, pe_high, pe_low, mkt_cap
            db.commit()
        elif action == "a":
            break
        # "n" (or anything else): skip to the next row
    log.info("Completed")
def main():
    """Build the yearly spending report table and mail it."""
    log.info("Started...")
    db = database2()
    rpt = report()
    formats = [
        rpt.CONST_FORMAT_NONE,
        rpt.CONST_FORMAT_CCY,
        rpt.CONST_FORMAT_CCY,
        rpt.CONST_FORMAT_CCY,
        rpt.CONST_FORMAT_CCY_COLOR,
    ]
    table = [["Category", "Spent", "Projected", "Budget", "Tracking"]]
    # (category name, spending category ids, yearly budget)
    budget_rows = [
        ("Base", [0, 2, 3, 4, 5, 8, 12, 96], 65000),
        ("Rent", [1], 48000),
        ("Travel", [7], 10000),
        ("Helper", [9], 12000),
        ("Monchichi", [94], 12000),
        ("Deux", [93], 1000),
        ("Fumi", [11], 5000),
        ("Mike", [6, 10], 5000),
        ("Special", [95, 97, 98, 99], 0),
        ("Total", [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 93, 94, 95, 96, 97, 98, 99], 158000),
    ]
    for name, categories, budget in budget_rows:
        append_budget_row(db, table, name, categories, budget)
    recon_projected = calculate_recon_projected(table, [2, 3, 4, 5, 7, 8], 1, 6, 2, 4)
    table.append(["Recon", db.get_ytd_spending_sum(), recon_projected, 158000, 158000 - recon_projected])
    fumi_projected = calculate_fumi_projected(table, 1, 6, 4)
    table.append(["Payout", 0, fumi_projected, 0, 0])
    rpt.add_table(table, formats)
    subject = 'Spending Report - ' + strftime("%Y-%m-%d", localtime())
    send_mail_html_self(subject, rpt.get_html())
    log.info("Completed")
def main():
    """Refresh all SimFin-sourced data (financials, ref data, market data) and mail two load summaries."""
    log.info("Started...")
    # Start from a clean local cache so SimFin re-downloads everything
    if path.exists('/home/scanlom/simfin_data/'):
        rmtree('/home/scanlom/simfin_data/')
    # 'free' selects the free data tier; see www.simfin.com for what data
    # is free and how to buy more
    set_api_key('free')
    # Local data directory; created on demand if it does not exist
    set_data_dir('~/simfin_data/')
    rpt = report()
    rpt.add_string(simfin_load("income", load_income, simfin_income_by_ticker, delete_simfin_income_by_id, post_simfin_income))
    rpt.add_string(simfin_load("balance", load_balance, simfin_balance_by_ticker, delete_simfin_balance_by_id, post_simfin_balance))
    rpt.add_string(simfin_load("cashflow", load_cashflow, simfin_cashflow_by_ticker, delete_simfin_cashflow_by_id, post_simfin_cashflow))
    send_mail_html_self('Blue Lion - Simfin Load - Financials', rpt.get_html())
    rpt2 = report()
    rpt2.add_string(simfin_load_ref_data())
    rpt2.add_string(simfin_load_market_data())
    rpt2.add_string(simfin_load_market_data_historical())
    send_mail_html_self('Blue Lion - Simfin Load - Market Data', rpt2.get_html())
    log.info("Completed")
def simfin_load_market_data_historical():
    """Load daily US share prices from SimFin and upsert them into market_data_historical.

    Rows missing a close/adjusted close, or with no matching ref data, are skipped.
    Returns a one-line summary string of inserted/updated/skipped counts.
    """
    # BUG FIX: the startup log previously named simfin_load_market_data
    log.info("Called simfin_load_market_data_historical...")
    df = load_shareprices(variant='daily', market='us')
    json = frame_to_json(df)
    num_inserted = 0
    num_updated = 0
    num_no_ref_data = 0
    num_no_close = 0
    for j in json:
        log.info("Processing %s" % (j['ticker']))
        # Need both a close and an adjusted close to store anything useful
        if j['close'] is None or j['adjClose'] is None:
            num_no_close += 1
            log.info("No close found, skipping")
            continue
        ref = ref_data_by_symbol(j['ticker'])
        if ref is None:
            num_no_ref_data += 1
            log.info("No ref data found, skipping")
            continue
        # Upsert: insert when no row exists for (ref_data_id, date), otherwise update
        cur = mdh_by_ref_data_id_date(ref['id'], j['date'])
        if cur is None:
            num_inserted += 1
            post_market_data_historical(j['date'], ref['id'], j['close'], j['adjClose'])
        else:
            num_updated += 1
            put_market_data_historical(cur['id'], cur['date'], cur['refDataId'], j['close'], j['adjClose'])
    ret = "market_data_historical: Inserted %d records, Updated %d records, %d no ref data and %d no close" % (num_inserted, num_updated, num_no_ref_data, num_no_close)
    log.info(ret)
    return ret
def populate_magic(rpt):
    """Append a 'Magic Top Ten' table, built from the first 20 ref-data names, to rpt."""
    log.info("populate_magic called...")
    rpt.add_string("Screen Magic Top Ten")
    headlines = []
    # Only pull headlines for the first 20 instruments
    for count, inst in enumerate(ref_data(), start=1):
        if count > 20:
            break
        log.info("Requesting headline for " + inst['symbol'])
        headlines.append(headline_by_ticker(inst['symbol']))
    # Highest 'magic' score first
    sorted_headlines = sorted(headlines, key=lambda h: h['magic'], reverse=True)
    formats = [rpt.CONST_FORMAT_NONE, rpt.CONST_FORMAT_NONE, rpt.CONST_FORMAT_CCY_COLOR]
    table = [["Ticker", "Description", "Magic"]]
    for h in sorted_headlines[:10]:
        table.append([h['ticker'], h['description'], h['magic']])
    rpt.add_table(table, formats)
def main():
    """Backfill gaps in all of the *_history tables up to today."""
    log.info("Started...")
    conn = connect(config_database_connect)
    cur = conn.cursor(cursor_factory=DictCursor)
    # table name -> SQL template copying the rows of one date to another
    templates = [
        ('balances_history', "insert into balances_history (select '%s', type, value from balances_history where date='%s')"),
        ('index_history', "insert into index_history (select '%s', type, value from index_history where date='%s')"),
        ('divisors_history', "insert into divisors_history (select '%s', type, value from divisors_history where date='%s')"),
        ('portfolio_history', "insert into portfolio_history (select '%s', symbol, value, type, pricing_type, quantity, price from portfolio_history where date='%s')"),
    ]
    for table, sql_template in templates:
        backfill(conn, cur, table, sql_template)
    cur.close()
    conn.close()
    log.info("Completed")
def last(symbol):
    """Return the latest price for symbol from the AlphaVantage GLOBAL_QUOTE endpoint as a Decimal.

    Retries up to CONST_RETRIES times with a pause between attempts; re-raises
    the final exception if all attempts fail.
    """
    retry = 1
    sleep(CONST_THROTTLE_SECONDS)  # Sleep to avoid AlphaVantage throttling error
    url = 'https://www.alphavantage.co/query?function=GLOBAL_QUOTE&symbol=%s&apikey=2YG6SAN57NRYNPJ8' % (symbol)
    # BUG FIX: data was unbound when urlopen itself failed, so the log.info(data)
    # below raised NameError and masked the real exception
    data = None
    while retry <= CONST_RETRIES:
        try:
            raw_bytes = urlopen(url).read()
            data = loads(raw_bytes.decode())
            return Decimal(data['Global Quote']['05. price'])
        except Exception as err:
            if retry >= CONST_RETRIES:
                log.warning("Unable to retrieve last for %s" % (symbol))
                log.info(data)
                raise err
            else:
                log.warning("Unable to retrieve last for %s, retry %d" % (symbol, retry))
                retry += 1
                sleep(CONST_RETRY_SECONDS)  # For some reason AlphaVantage is not returning, sleep to try and allow them to recover
def main():
    """Compute YTD profit and retirement projections, then mail the summary."""
    log.info("Started...")
    db = database2()
    rpt = report()
    total_roe = db.get_balance(db.CONST_BALANCES_TYPE_TOTAL_ROE)
    total_finish = CONST_ONE_UNIT / CONST_FINISH_PCT
    index_roe = db.get_index_history(db.CONST_INDEX_ROE, datetime.today().date())
    ytd_base_index_roe = db.get_index_history(db.CONST_INDEX_ROE, db.get_ytd_base_date())
    # NOTE(review): despite the names these are timedeltas, not year counts — confirm downstream usage
    definite_retirement_years = CONST_DEFINITE_RETIREMENT_DATE - datetime.today().date()
    hoped_retirement_years = CONST_HOPED_RETIREMENT_DATE - datetime.today().date()
    # Determine cash made this year: growth in total ROE minus fresh savings
    profit = (total_roe
              - db.get_balance_history(db.CONST_BALANCES_TYPE_TOTAL_ROE, db.get_ytd_base_date())
              - db.get_balance(db.CONST_BALANCES_TYPE_SAVINGS))
    populate_summary(db, rpt, index_roe, total_roe, total_finish, hoped_retirement_years, profit)
    populate_summary_super_inflection(db, rpt, index_roe, total_roe, definite_retirement_years)
    populate_stress_test_twenty_percent_drop(db, rpt, index_roe, total_roe, total_finish)
    # Send a summary mail
    subject = "Blue Lion - " + rpt.format_ccy(profit) + " / " + rpt.format_pct(index_roe / ytd_base_index_roe - 1)
    send_mail_html_self(subject, rpt.get_html())
    log.info("Completed")
def main():
    """Recompute portfolio totals and index values, then snapshot them into the history tables."""
    log.info("Started...")
    db = database2()
    # Per-portfolio totals and their corresponding index values
    total_self = calculate_total(db, db.CONST_PORTFOLIO_SELF)
    index_self = calculate_index(db, total_self, db.CONST_INDEX_SELF)
    total_play = calculate_total(db, db.CONST_PORTFOLIO_PLAY)
    index_play = calculate_index(db, total_play, db.CONST_INDEX_PLAY)
    total_managed = calculate_total(db, db.CONST_PORTFOLIO_MANAGED)
    index_managed = calculate_index(db, total_managed, db.CONST_INDEX_MANAGED)
    cash = db.get_constituents_by_portfolio_symbol(db.CONST_PORTFOLIO_CASH, db.CONST_SYMBOL_CASH)
    # ROTC includes debt in the total; ROE nets it out below
    total_roe = total_self + total_managed + cash
    total_rotc = total_roe
    debt = db.get_constituents_by_portfolio_symbol(db.CONST_PORTFOLIO_CASH, db.CONST_SYMBOL_DEBT)
    total_roe -= debt
    index_roe = calculate_index(db, total_roe, db.CONST_INDEX_ROE)
    index_rotc = calculate_index(db, total_rotc, db.CONST_INDEX_ROTC)
    conn = connect(config_database_connect)
    cur = conn.cursor(cursor_factory=DictCursor)
    # Update index_history with today's values (index ids: 1=self, 2=roe, 3=rotc, 4=managed, 5=play)
    cur.execute("delete from index_history where date=current_date")
    cur.execute("insert into index_history values (current_date, 1, " + format_ccy_sql(index_self) + ")")
    cur.execute("insert into index_history values (current_date, 2, " + format_ccy_sql(index_roe) + ")")
    cur.execute("insert into index_history values (current_date, 3, " + format_ccy_sql(index_rotc) + ")")
    cur.execute("insert into index_history values (current_date, 4, " + format_ccy_sql(index_managed) + ")")
    cur.execute("insert into index_history values (current_date, 5, " + format_ccy_sql(index_play) + ")")
    conn.commit()
    # Update balances with today's values (balance types: 12=roe, 13=self, 14=managed, 18=rotc, 19=play)
    cur.execute("update balances set value=" + format_ccy_sql(total_roe) + " where type=12")
    cur.execute("update balances set value=" + format_ccy_sql(total_self) + " where type=13")
    cur.execute("update balances set value=" + format_ccy_sql(total_managed) + " where type=14")
    cur.execute("update balances set value=" + format_ccy_sql(total_rotc) + " where type=18")
    cur.execute("update balances set value=" + format_ccy_sql(total_play) + " where type=19")
    conn.commit()
    # Update balances_history with today's values
    cur.execute("delete from balances_history where date=current_date")
    cur.execute("insert into balances_history (select current_date, type, value from balances)")
    conn.commit()
    # Update portfolio_history with today's values
    cur.execute("delete from portfolio_history where date=current_date")
    cur.execute("insert into portfolio_history (select current_date, symbol, value, portfolio_id, pricing_type, quantity, price from constituents)")
    conn.commit()
    # Update divisors_history with today's values
    cur.execute("delete from divisors_history where date=current_date")
    cur.execute("insert into divisors_history (select current_date, type, value from divisors)")
    conn.commit()
    # Close the db
    cur.close()
    conn.close()
    log.info("Completed")
def main():
    """Ad-hoc test harness: print a sample cagr() calculation."""
    log.info("Started...")
    # Test
    print(cagr(5, 5.10, 0.40, 0.10, 18, 183.43))
    # post_simfin_income({'date':'2019-12-13', 'ticker':'Mikey'})
    log.info("Completed")
def __init__(self, symbol):
    """Download daily adjusted price history for symbol from AlphaVantage.

    Populates self.data_adj_close / data_close / data_high / data_low as lists
    of (date-string, Decimal) pairs, retrying the whole download up to
    CONST_RETRIES times before re-raising the last exception.
    """
    self.symbol = symbol
    retry = 1
    # BUG FIX: data_compact was unbound when the first urlopen failed, so the
    # log.info(data_compact) in the failure path raised NameError and masked err
    data_compact = None
    while retry <= CONST_RETRIES:
        try:
            sleep(CONST_THROTTLE_SECONDS)  # Sleep to avoid AlphaVantage throttling error
            url = 'https://www.alphavantage.co/query?function=TIME_SERIES_DAILY_ADJUSTED&symbol=%s&outputsize=full&apikey=2YG6SAN57NRYNPJ8' % (symbol)
            raw_bytes = urlopen(url).read()
            data_full = loads(raw_bytes.decode(), object_pairs_hook=OrderedDict)
            sleep(CONST_THROTTLE_SECONDS)  # Sleep to avoid AlphaVantage throttling error
            url = 'https://www.alphavantage.co/query?function=TIME_SERIES_DAILY_ADJUSTED&symbol=%s&apikey=2YG6SAN57NRYNPJ8' % (symbol)
            raw_bytes = urlopen(url).read()
            data_compact = loads(raw_bytes.decode(), object_pairs_hook=OrderedDict)
            # For outputsize=full, TIME_SERIES_DAILY_ADJUSTED is one week delayed. So we have to get the compact information,
            # store it, and then add the rest from full
            self.data_adj_close = OrderedDict()
            self.data_close = OrderedDict()
            self.data_high = OrderedDict()
            self.data_low = OrderedDict()
            # Compact (fresh) data wins; keys are 'YYYY-MM-DD' date strings
            for key, value in data_compact['Time Series (Daily)'].items():
                if sanity_check_historical_data(key, value):
                    self.data_adj_close[key] = Decimal( value['5. adjusted close'] )
                    self.data_close[key] = Decimal( value['4. close'] )
                    self.data_high[key] = Decimal( value['2. high'] )
                    self.data_low[key] = Decimal( value['3. low'] )
            # Full (delayed) data only fills in dates the compact pull did not cover
            for key, value in data_full['Time Series (Daily)'].items():
                if sanity_check_historical_data(key, value):
                    if not (key in self.data_adj_close):
                        self.data_adj_close[key] = Decimal( value['5. adjusted close'] )
                    if not (key in self.data_close):
                        self.data_close[key] = Decimal( value['4. close'] )
                    if not (key in self.data_high):
                        self.data_high[key] = Decimal( value['2. high'] )
                    if not (key in self.data_low):
                        self.data_low[key] = Decimal( value['3. low'] )
            # Convert to ordered (date, value) pair lists for positional access
            self.data_adj_close = list( self.data_adj_close.items() )
            self.data_close = list( self.data_close.items() )
            self.data_high = list( self.data_high.items() )
            self.data_low = list( self.data_low.items() )
            break
        except Exception as err:
            if retry >= CONST_RETRIES:
                log.error( "Unable to retrieve historicals for %s" % (self.symbol) )
                log.info( data_compact )
                raise err
            else:
                log.warning( "Unable to retrieve historicals for %s, retry %d" % (self.symbol, retry) )
                retry += 1
                sleep(CONST_RETRY_SECONDS)  # For some reason AlphaVantage is not returning, sleep to try and allow them to recover
def main():
    """Report per-symbol P&L contributors for portfolio 1 between two snapshot dates, then mail it."""
    log.info("Started...")
    db = database2()
    rpt = report()
    CONST_PORTFOLIO = 1
    # NOTE(review): CONST_START is later than CONST_END — confirm the range helpers expect (newer, older)
    CONST_START = date(2020, 1, 21)
    CONST_END = date(2017, 1, 1)
    # contributors[symbol] = [symbol, start_value, buys, sells, divs, end_value, diff, pct]
    contributors = {}
    start = db.get_portfolio_history_by_date(CONST_PORTFOLIO, CONST_START)
    for row in start:
        contributors[row.symbol] = [row.symbol, row.value, 0, 0, 0, 0, 0, 0]
    end = db.get_portfolio_history_by_date(CONST_PORTFOLIO, CONST_END)
    for row in end:
        if row.symbol in contributors:
            contributors[row.symbol][5] = row.value
        else:
            # Position only exists at the end snapshot
            contributors[row.symbol] = [row.symbol, 0, 0, 0, 0, row.value, 0, 0]
    # Accumulate dividends, buys and sells that occurred inside the window
    divs = db.get_actions_by_date_range_type(CONST_START, CONST_END, db.CONST_ACTION_TYPE_DIVIDEND_PORTFOLIO)
    for row in divs:
        if row.symbol in contributors:
            contributors[row.symbol][4] += row.value1
    buys = db.get_actions_by_date_range_type(CONST_START, CONST_END, db.CONST_ACTION_TYPE_BOUGHT_PORTFOLIO)
    for row in buys:
        if row.symbol in contributors:
            contributors[row.symbol][2] += row.value1
    sells = db.get_actions_by_date_range_type(CONST_START, CONST_END, db.CONST_ACTION_TYPE_SOLD_PORTFOLIO)
    for row in sells:
        if row.symbol in contributors:
            contributors[row.symbol][3] += row.value1
    for key, value in contributors.items():
        # diff = what came out (sells + divs + end value) minus what went in (start + buys)
        value[6] = value[3] + value[4] + value[5] - value[1] - value[2]
        # pct return on amount invested
        # NOTE(review): divides by start+buys — ZeroDivisionError if a symbol has neither; confirm data guarantees this
        value[7] = (value[1] + value[2] + value[6]) / (value[1] + value[2]) - 1
    formats = [rpt.CONST_FORMAT_NONE, rpt.CONST_FORMAT_CCY, rpt.CONST_FORMAT_CCY, rpt.CONST_FORMAT_CCY, rpt.CONST_FORMAT_CCY, rpt.CONST_FORMAT_CCY, rpt.CONST_FORMAT_CCY_COLOR, rpt.CONST_FORMAT_PCT_COLOR]
    table = []
    for key, value in contributors.items():
        table.append(value)
    # Best performers first
    table.sort(key=lambda a: a[7] * -1)
    table.insert(0, ["Symbol", "Start", "Buy", "Sell", "Div", "End", "Diff", "%"])
    rpt.add_table(table, formats)
    subject = 'Blue Lion - Contributors'
    send_mail_html_self(subject, rpt.get_html())
    log.info("Completed")
def main():
    """Ad-hoc test harness: print the latest MSFT price."""
    log.info("Started...")
    # Test
    print(last('MSFT'))
    # foo = historicals('BAS.F')
    # print( foo.change_ten_years()[0] )
    log.info("Completed")
def main():
    """Build the max-movers health check report and mail it."""
    log.info("Started...")
    db = database2()
    rpt = report()
    populate_max_movers(db, rpt)
    subject = 'Health Check - ' + strftime("%Y-%m-%d", localtime())
    send_mail_html_self(subject, rpt.get_html())
    log.info("Completed")
def main():
    """Ad-hoc test harness: print a WFC price and a ten-year change for 3030.T."""
    log.info("Started...")
    # Test
    print(last('WFC'))
    foo = historicals('3030.T')
    print(foo.change_ten_years()[0])
    log.info("Completed")
def main():
    """Build the stock search report (5yr CAGR screen + magic screen) and mail it."""
    log.info("Started...")
    db = database2()
    rpt = report()
    populate_five_cagr(db, rpt)
    populate_magic(rpt)
    send_mail_html_self('Blue Lion - Search', rpt.get_html())
    log.info("Completed")
def main():
    """Ad-hoc test harness: render a tiny table through the report formatter."""
    log.info("Started...")
    # Test
    table = [
        ["", "Mike", "Dan"],
        [5.555555, 6.66666, 7.77777],
    ]
    formats = [
        report.CONST_FORMAT_NONE,
        report.CONST_FORMAT_CCY_INT_COLOR,
        report.CONST_FORMAT_PCT,
    ]
    r = report()
    r.add_table(table, formats)
    print(r.get_html())
    log.info("Completed")
def main():
    """Yearly reset: zero out the Paid, Tax, and Savings balances."""
    log.info("Started job_yearly.py...")
    conn = connect(config_database_connect)
    cur = conn.cursor(cursor_factory=DictCursor)
    cur.execute("update balances set value=0 where type=15")  # Paid
    cur.execute("update balances set value=0 where type=16")  # Tax
    cur.execute("update balances set value=0 where type=17")  # Savings
    conn.commit()
    log.info("Completed")
def main():
    """Backfill gaps in the history tables (balances, index, divisors, portfolio)."""
    log.info("Started...")
    conn = connect(config_database_connect)
    cur = conn.cursor(cursor_factory=DictCursor)
    backfill(conn, cur, 'balances_history',
             "insert into balances_history (select '%s', type, value from balances_history where date='%s')")
    backfill(conn, cur, 'index_history',
             "insert into index_history (select '%s', type, value from index_history where date='%s')")
    backfill(conn, cur, 'divisors_history',
             "insert into divisors_history (select '%s', type, value from divisors_history where date='%s')")
    backfill(conn, cur, 'portfolio_history',
             "insert into portfolio_history (select '%s', symbol, value, type, pricing_type, quantity, price from portfolio_history where date='%s')")
    cur.close()
    conn.close()
    log.info("Completed")
def main():
    """Ad-hoc test harness for database2 query helpers."""
    log.info("Started...")
    # Test
    db = database2()
    print(db.get_balance(database2.CONST_BALANCES_TYPE_TOTAL_ROE))
    print(db.get_ytd_base_date())
    # Exercise a raw session query (type 2 = ROE index) for a fixed date
    db.session.query(db.IndexHistory).filter(db.IndexHistory.type == 2, db.IndexHistory.date == '08/14/2018').one()
    print(db.get_ytd_spending_sum([0, 2, 3, 4, 5, 8, 12, 96]))
    print(db.get_stocks())
    log.info("Completed")
def backfill(conn, cur, table, sql_template):
    """Copy the newest row-set in `table` forward one day at a time, up to (not including) today.

    sql_template takes two %s slots: the new date and the source date.
    """
    cur.execute('select max(date) from %s' % (table))
    existing_date = cur.fetchall()[0][0]
    log.info("Backfilling from existing_date %s in %s" % (existing_date, table))
    cur_date = existing_date + timedelta(days=1)
    while cur_date < cur_date.today():
        cur.execute(sql_template % (cur_date, existing_date))
        conn.commit()
        log.info("Backfilled %s" % (cur_date))
        cur_date += timedelta(days=1)
def main():
    """Load AlphaVantage historical closes for focus names into market_data_historical."""
    log.info("Started loading market data...")
    for inst in ref_data_focus():
        try:
            h = historicals(inst['symbolAlphaVantage'])
        except Exception as err:
            # Log exceptions as warnings, there often won't be historical data for international names
            log.warning("Could not get data for %s" % (inst['symbol']))
            continue
        log.info("Populating for %s" % (inst['symbol']))
        posted = 0
        updated = 0
        # close and adj_close are parallel (date, value) lists; dates must line up
        for close, adj_close in zip(h.data_close, h.data_adj_close):
            if close[0] != adj_close[0]:
                raise RuntimeError('Date mismatch: close: %s, adj_close: %s' % (close[0], adj_close[0]))
            existing = mdh_by_ref_data_id_date(inst['id'], close[0])
            if existing is None:
                post_market_data_historical(close[0], inst['id'], close[1], adj_close[1])
                posted += 1
            else:
                put_market_data_historical(existing['id'], close[0], inst['id'], close[1], adj_close[1])
                updated += 1
        log.info("Posted %d records, Put %d records" % (posted, updated))
    log.info("Completed")
def main():
    """Walk backwards through balance history, logging the cash recon per day, until it ties out to zero."""
    log.info("Started...")
    db = database2()
    d = datetime.now().date()
    recon = 1
    # Column header for the rows logged below.
    # BUG FIX: header previously listed pos_ed before pos_owe_portfolio, but the
    # data rows log pos_owe_portfolio first — labels now match the values.
    log.info("%s %s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s" % (d, "recon", "pos_gs", "pos_gs_ira", "pos_gs_hkd", "pos_owe_portfolio", "pos_ed", "neg_cash_total", "neg_cash_self", "neg_cash_managed"))
    while recon != 0:
        pos_gs = db.get_balance_history(db.CONST_BALANCES_TYPE_GS, d)
        pos_gs_ira = db.get_balance_history(db.CONST_BALANCES_TYPE_GS_IRA, d)
        pos_gs_hkd = db.get_balance_history(db.CONST_BALANCES_TYPE_GS_HKD, d)
        pos_owe_portfolio = db.get_balance_history(db.CONST_BALANCES_TYPE_OWE_PORTFOLIO, d)
        pos_ed = db.get_balance_history(db.CONST_BALANCES_TYPE_ED, d)
        neg_cash_total = db.get_portfolio_history(db.CONST_PORTFOLIO_CASH, db.CONST_SYMBOL_CASH, d)
        neg_cash_self = db.get_portfolio_history(db.CONST_PORTFOLIO_SELF, db.CONST_SYMBOL_CASH, d)
        neg_cash_managed = db.get_portfolio_history(db.CONST_PORTFOLIO_MANAGED, db.CONST_SYMBOL_CASH, d)
        # Positive balances (accounts) should equal the negative cash positions
        recon = (pos_gs + pos_gs_ira + pos_gs_hkd + pos_owe_portfolio + pos_ed) - (neg_cash_total + neg_cash_self + neg_cash_managed)
        log.info("%s %s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s" % (d, recon, pos_gs, pos_gs_ira, pos_gs_hkd, pos_owe_portfolio, pos_ed, neg_cash_total, neg_cash_self, neg_cash_managed))
        d -= timedelta(1)
    log.info("Completed")
def populate_market_data(i):
    """Fetch the latest price for one ref-data row and upsert it into market_data."""
    try:
        price = last(i['symbolAlphaVantage'])
    except Exception as err:
        # If we can't retrieve market data for one symbol, just continue on to the others. We'll catch that in the tech health checks, as market data update time will be stale
        log.warning("Unable to retrieve last for %s" % (i['symbol']))
        return
    md = market_data_by_symbol(i['symbol'])
    if md is None:
        log.info("POST market_data: %s, %f" % (i['symbol'], price))
        post_market_data(i['id'], price)
    else:
        log.info("PUT market_data: %s, %f" % (i['symbol'], price))
        put_market_data(md['id'], i['id'], price)
def main():
    """Build the Blue Tree budget-tracking report and mail it."""
    log.info("Started...")
    db = database2()
    rpt = report()
    formats = [
        rpt.CONST_FORMAT_NONE,
        rpt.CONST_FORMAT_CCY,
        rpt.CONST_FORMAT_CCY,
        rpt.CONST_FORMAT_CCY,
        rpt.CONST_FORMAT_CCY_COLOR,
    ]
    table = [["Category", "Spent", "Projected", "Budget", "Tracking"]]
    # (category name, spending category ids, yearly budget)
    budget_rows = [
        ("Base", [0, 2, 3, 4, 5, 8, 12, 96], 75000),
        ("Rent", [1], 73000),
        ("Travel", [7], 10000),
        ("Helper", [9], 12000),
        ("Monchichi", [94], 12000),
        ("Deux", [93], 12000),
        ("Fumi", [11], 5000),
        ("Mike", [6, 10], 5000),
        ("Special", [95, 97, 98, 99], 0),
        ("Total", [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 93, 94, 95, 96, 97, 98, 99], CONST_BUDGET),
    ]
    for name, categories, budget in budget_rows:
        append_budget_row(db, table, name, categories, budget)
    recon_projected = calculate_recon_projected(table, [2, 3, 4, 5, 6, 8, 9], 1, 7, 2, 4)
    table.append([
        "Recon", db.get_ytd_spending_sum(), recon_projected, CONST_BUDGET,
        CONST_BUDGET - recon_projected
    ])
    fumi_projected = calculate_fumi_projected(table, 1, 7, 4)
    table.append(["Payout", 0, fumi_projected, 0, 0])
    rpt.add_table(table, formats)
    # Gross projected spending plus fixed costs up by the tax rate
    plan_projected = (recon_projected + CONST_FIXED) / (1 - CONST_TAX_RATE)
    rpt.add_string("BLRP " + rpt.format_ccy(plan_projected) + "=(" + rpt.format_ccy(recon_projected) + "+" + rpt.format_ccy(CONST_FIXED) + ")/(1-" + rpt.format_ccy(CONST_TAX_RATE) + ")")
    rpt.add_string("BLRB " + rpt.format_ccy(CONST_ONE_UNIT) + "=(" + rpt.format_ccy(CONST_BUDGET) + "+" + rpt.format_ccy(CONST_FIXED) + ")/(1-" + rpt.format_ccy(CONST_TAX_RATE) + ")")
    subject = 'Blue Tree - ' + rpt.format_ccy(plan_projected) + ' / ' + rpt.format_ccy(CONST_ONE_UNIT)
    send_mail_html_self(subject, rpt.get_html())
    log.info("Completed")
def main():
    """Connect to the local IB gateway, run a fundamental request, and print the server clock."""
    log.info("Started...")
    ## Check that the port is the same as on the Gateway
    ## ipaddress is 127.0.0.1 if one same machine, clientid is arbitrary
    app = TestApp("127.0.0.1", 7496, 10)
    app.big_fundamental()
    print(app.speaking_clock())
    app.disconnect()
    log.info("Completed")
def main():
    """Refresh prices for by-price constituents (values too) and for all stocks (price only)."""
    log.info("Started loading prices for constituents...")
    db = database2()
    populate_price_and_value(db, db.get_constituents(db.CONST_PRICING_TYPE_BY_PRICE), True)
    log.info("Completed")
    log.info("Started loading prices for stocks...")
    populate_price_and_value(db, db.get_stocks(), False)
    log.info("Completed")
def last(symbol):
    """Return the most recent adjusted close for symbol from AlphaVantage as a Decimal.

    Retries up to CONST_RETRIES times with a pause between attempts; re-raises
    the final exception if all attempts fail.
    """
    retry = 1
    sleep(CONST_THROTTLE_SECONDS)  # Sleep to avoid AlphaVantage throttling error
    url = 'https://www.alphavantage.co/query?function=TIME_SERIES_DAILY_ADJUSTED&symbol=%s&apikey=2YG6SAN57NRYNPJ8' % (symbol)
    # BUG FIX: data was unbound when urlopen itself failed, so the log.info(data)
    # below raised NameError and masked the real exception
    data = None
    while retry <= CONST_RETRIES:
        try:
            raw_bytes = urlopen(url).read()
            data = loads(raw_bytes.decode())
            # '3. Last Refreshed' is 'YYYY-MM-DD ...'; key the daily series by its date part
            return Decimal(data['Time Series (Daily)'][data['Meta Data']['3. Last Refreshed'][0:10]]['5. adjusted close'])
        except Exception as err:
            if retry >= CONST_RETRIES:
                log.error("Unable to retrieve last for %s" % (symbol))
                log.info(data)
                raise err
            else:
                log.warning("Unable to retrieve last for %s, retry %d" % (symbol, retry))
                retry += 1
                sleep(CONST_RETRY_SECONDS)  # For some reason AlphaVantage is not returning, sleep to try and allow them to recover
def populate_price_and_value(db, rows, populate_value):
    """Refresh row.price (and row.value when populate_value) for each row, then commit once."""
    for row in rows:
        log.info("Downloading %s..." % (row.symbol))
        fx = 1
        if row.symbol in CONST_FX_MAP:
            fx = CONST_FX_MAP[row.symbol]
            log.info("Using fx %f" % (fx))
        try:
            row.price = last(row.symbol)
            if populate_value:
                # NOTE(review): assumes CONST_FX_MAP values combine with Decimal
                # (Decimal(1) / float raises TypeError) — confirm the map's value type
                row.value = row.price * (Decimal(1) / fx) * row.quantity
        except Exception as err:
            # One bad symbol shouldn't block the rest
            log.error("Could not get price for %s" % (row.symbol))
            log.exception(err)
        else:
            log.info("Updated %s..." % (row.symbol))
    log.info("Committing transaction...")
    db.commit()
    log.info("Done")
def main():
    """Assemble the Blue Lion health-check report (Trade / Upgrade / Research sections) and mail it."""
    log.info("Started...")
    db = database2()
    rpt = report()
    rpt.add_heading("Trade")
    populate_cash(db, rpt)
    rpt.add_string("")
    populate_allocations(db, rpt)
    rpt.add_heading("Upgrade")
    populate_thirty_pe(db, rpt)
    rpt.add_string("")
    populate_five_cagr(db, rpt)
    rpt.add_heading("Research")
    populate_reds(db, rpt)
    rpt.add_string("")
    populate_max_movers(db, rpt)
    send_mail_html_self('Blue Lion - Health Check', rpt.get_html())
    log.info("Completed")
def backfill(conn, cur, table, sql_template):
    """Roll the rows of `table` forward day by day until today, skipping days already present.

    sql_template takes two %s slots: the new date and the source date.
    """
    # Backfill the past one week, or more if the latest date is older
    existing_date = datetime.today().date() - CONST_BACKFILL_DELTA
    cur.execute('select max(date) from %s' % (table))
    max_date = cur.fetchall()[0][0]
    if existing_date > max_date:
        existing_date = max_date
    cur_date = existing_date + timedelta(days=1)
    log.info("Backfilling from existing_date %s in %s" % (existing_date, table))
    while cur_date < cur_date.today():
        # Don't duplicate a day that already has rows
        cur.execute("select date from %s where date = '%s'" % (table, cur_date))
        if cur.rowcount > 0:
            log.info("Skipping %s, already populated" % (cur_date))
            cur_date += timedelta(days=1)
            continue
        cur.execute(sql_template % (cur_date, existing_date))
        conn.commit()
        log.info("Backfilled %s" % (cur_date))
        cur_date += timedelta(days=1)
def simfin_load_ref_data():
    """Load SimFin US companies and industries and upsert them into ref_data.

    Existing rows keep their symbol/AlphaVantage symbol/active/focus flags;
    name, sector and industry are refreshed. Returns a one-line summary string.
    """
    log.info("Called simfin_load_ref_data...")
    df = load_companies(market='us')
    json_companies = frame_to_json(df)
    df = load_industries()
    json_industries = frame_to_json(df)
    # Index industries by id for O(1) sector/industry lookup
    industries_by_id = {i['industryId']: i for i in json_industries}
    num_inserted = 0
    num_updated = 0
    for c in json_companies:
        log.info("Processing %s" % (c['ticker']))
        sector = ""
        industry = ""
        if c['industryId'] in industries_by_id:
            sector = industries_by_id[c['industryId']]['sector']
            industry = industries_by_id[c['industryId']]['industry']
        cur = ref_data_by_symbol(c['ticker'])
        if cur is None:
            num_inserted += 1
            post_ref_data(c['ticker'], c['companyName'], sector, industry)
        else:
            num_updated += 1
            put_ref_data(cur['id'], cur['symbol'], cur['symbolAlphaVantage'], c['companyName'], sector, industry, cur['active'], cur['focus'])
    ret = "ref_data: Inserted %d records, Updated %d records" % (num_inserted, num_updated)
    log.info(ret)
    return ret
def main():
    """One-off recon of implied spending vs recorded YTD spending as of 2019-11-27."""
    log.info("Started...")
    db = database2()
    d = date(2019, 11, 27)
    # Positive side: bank balances
    pos_hsbc = db.get_balance_history(db.CONST_BALANCES_TYPE_HSBC, d)
    pos_vb = db.get_balance_history(db.CONST_BALANCES_TYPE_VIRTUAL_BANK, d)
    pos_jpy = db.get_balance_history(db.CONST_BALANCES_TYPE_JPY, d)
    # Negative side: amounts owed and credit cards
    neg_owe_portfolio = db.get_balance_history(db.CONST_BALANCES_TYPE_OWE_PORTFOLIO, d)
    neg_amex_cx = db.get_balance_history(db.CONST_BALANCES_TYPE_AMEX_CX, d)
    neg_capital_one = db.get_balance_history(db.CONST_BALANCES_TYPE_CAPITAL_ONE, d)
    neg_hsbc_visa = db.get_balance_history(db.CONST_BALANCES_TYPE_HSBC_VISA, d)
    budget_recon = (pos_hsbc + pos_vb + pos_jpy) - (neg_owe_portfolio + neg_amex_cx + neg_capital_one + neg_hsbc_visa)
    paid = db.get_balance_history(db.CONST_BALANCES_TYPE_PAID, d)
    tax = db.get_balance_history(db.CONST_BALANCES_TYPE_TAX, d)
    savings = db.get_balance_history(db.CONST_BALANCES_TYPE_SAVINGS, d)
    # Implied spending: pay minus tax/savings minus the surplus over one budget unit
    spending = paid - tax - savings - (budget_recon - CONST_BUDGET_UNIT)
    # Difference vs the recorded YTD spending — zero means the books tie out
    recon = spending - db.get_ytd_spending_sum_by_date(d)
    log.info(recon)
    log.info("Completed")
def populate_price_and_value(db, rows, populate_value):
    """Refresh row.price (and row.value when populate_value) for each row, then commit once."""
    for row in rows:
        log.info("Downloading %s..." % (row.symbol))
        try:
            row.price = last(get_market_data_symbol(row.symbol))
            if populate_value:
                row.value = row.price * row.quantity
        except Exception as err:
            # One bad symbol shouldn't block the rest
            log.error("Could not get price for %s" % (row.symbol))
            log.exception(err)
        else:
            log.info("Updated %s..." % (row.symbol))
    log.info("Committing transaction...")
    db.commit()
    log.info("Done")
def main():
    """Dump the databases and key directories to a temp dir, then rotate snapshots into Dropbox."""
    log.info("Started...")
    # Clean out the temporary directory
    if path.exists(config_backup_tmp_dir):
        rmtree(config_backup_tmp_dir)
    makedirs(config_backup_tmp_dir)
    # Copy the db's (finances, fujippi, and wikidb); split keeps chunk files small
    call("pg_dump finances | split -b 1m - " + config_backup_tmp_dir + "financesbackup", shell=True)
    call("pg_dump fujippi | split -b 1m - " + config_backup_tmp_dir + "fujippibackup", shell=True)
    call("pg_dump wikidb -U wikiuser | split -b 1m - " + config_backup_tmp_dir + "wikidbbackup", shell=True)
    # Copy the directories we want
    for src_dir, dest_name in config_backup_zip_dirs:
        copytree(src_dir, config_backup_tmp_dir + dest_name)
    # Rotate: shift slot N-1 -> N from the oldest down; the new snapshot becomes slot 0
    for slot in reversed(range(0, config_backup_days)):
        dest = config_dropbox_dir + config_backup + str(slot)
        if slot > 0:
            src = config_dropbox_dir + config_backup + str(slot - 1)
            if path.exists(dest):
                rmtree(dest)
            if path.exists(src):
                rename(src, dest)
        else:
            move(config_backup_tmp_dir, dest)
    log.info("Completed")
def populate_five_cagr(db, rpt):
    """Append a 'Watch List 5yr CAGR > 10%' table to rpt.

    Screens visible stocks with a terminal PE, keeps those whose projected
    5-year CAGR exceeds 10%, and drops low-confidence research names
    (constituents bypass the confidence check).
    """
    log.info("populate_five_cagr called...")
    # '== False' is the SQLAlchemy column-expression form, not a Python comparison — do not change to 'not'
    rows = db.session.query(db.Stocks).\
        filter(db.Stocks.hidden == False).\
        filter(db.Stocks.pe_terminal > 0).\
        all()
    formats = [rpt.CONST_FORMAT_NONE, rpt.CONST_FORMAT_PCT_COLOR, rpt.CONST_FORMAT_CONFIDENCE]
    table = []
    for row in rows:
        log.info("Requesting cagr for " + row.symbol)
        c = cagr(5, row.eps, row.payout, row.growth, row.pe_terminal, row.price)
        # NOTE(review): Decimal(0.10) carries float imprecision; Decimal('0.10') would be exact
        if c > Decimal(0.10):
            # Most recent research note for this stock, if any
            rowResearch = db.session.query(db.Researches).\
                filter(db.Researches.stock_id == row.id).\
                order_by(desc(db.Researches.id)).first()
            if rowResearch is None:
                table.append([row.symbol, c, CONST_CONFIDENCE_NONE])
            else:
                r = confidence(rowResearch.comment)
                # If it's a constituent, don't worry about the confidence
                rowConstituent = db.session.query(db.Constituents).\
                    filter(db.Constituents.stock_id == row.id).first()
                if rowConstituent is not None:
                    r = CONST_CONFIDENCE_CONSTITUENT
                if r != CONST_CONFIDENCE_LOW:
                    table.append([row.symbol, c, r])
    # BUG FIX: was len(table) > 1, which mislabelled a single qualifying stock as "None"
    # (the header row is only inserted below, so table holds pure data rows here)
    if len(table) > 0:
        table.sort(key=lambda a: a[1], reverse=True)
        table.insert(0, ["Symbol", "5yr CAGR", "Confidence"])
        rpt.add_string("Watch List 5yr CAGR > 10%")
        rpt.add_table(table, formats)
    else:
        rpt.add_string("Watch List 5yr CAGR > 10% - None")
def main():
    """Refresh the latest market data for all focus names."""
    log.info("Started loading market data...")
    log.info("Loading focus names...")
    for inst in ref_data_focus():
        populate_market_data(inst)
    # Second pass over the non-focus ("blurry") names is currently disabled:
    #instruments = ref_data()
    #instruments = [x for x in instruments if x['symbolAlphaVantage'] != i['symbolAlphaVantage']]
    #log.info("Loading blurry names...")
    #for i in instruments:
    #populate_market_data(i)
    log.info("Completed")
def main():
    """Recompute the historical change columns for every stock, then commit once."""
    log.info("Started...")
    db = database2()
    for row in db.get_stocks():
        log.info("Downloading %s..." % (row.symbol))
        try:
            h = historicals(get_market_data_symbol(row.symbol))
            # (lookback in business days, change column, change-date column)
            periods = [
                (h.CONST_BUSINESS_DAYS_ONE, "day_change", "day_change_date"),
                (h.CONST_BUSINESS_DAYS_WEEK, "week_change", "week_change_date"),
                (h.CONST_BUSINESS_DAYS_MONTH, "month_change", "month_change_date"),
                (h.CONST_BUSINESS_DAYS_THREE_MONTHS, "three_month_change", "three_month_change_date"),
                (h.CONST_BUSINESS_DAYS_YEAR, "year_change", "year_change_date"),
                (h.CONST_BUSINESS_DAYS_FIVE_YEARS, "five_year_change", "five_year_change_date"),
                (h.CONST_BUSINESS_DAYS_TEN_YEARS, "ten_year_change", "ten_year_change_date"),
            ]
            for days, change_col, date_col in periods:
                update_stock_historical_change(row, h, days, change_col, date_col)
        except Exception as err:
            # Log exceptions as warnings, there often won't be historical data for international names
            log.warning("Could not get data for %s" % (row.symbol))
            # log.exception(err)
            continue
    log.info("Committing transaction...")
    db.commit()
    log.info("Completed")
def main():
    """Walk backwards one day at a time, reconciling balance history against
    portfolio cash, until a date is found where the two sides match.

    recon = (GS + GS IRA + GS HKD + owed-to-portfolio balances)
            - (total cash + self cash portfolio positions).
    NOTE(review): the loop has no lower bound on the date — it assumes a
    reconciling day exists in history; confirm that is always the case.
    """
    log.info("Started...")
    db = database2()
    # Renamed from `date` to avoid shadowing the datetime.date name.
    as_of = datetime.now().date()
    recon = 1
    while recon != 0:
        pos_gs = db.get_balance_history(db.CONST_BALANCES_TYPE_GS, as_of)
        pos_gs_ira = db.get_balance_history(db.CONST_BALANCES_TYPE_GS_IRA, as_of)
        pos_gs_hkd = db.get_balance_history(db.CONST_BALANCES_TYPE_GS_HKD, as_of)
        pos_owe_portfolio = db.get_balance_history(db.CONST_BALANCES_TYPE_OWE_PORTFOLIO, as_of)
        neg_cash_total = db.get_portfolio_history(db.CONST_PORTFOLIO_CASH, db.CONST_SYMBOL_CASH, as_of)
        neg_cash_self = db.get_portfolio_history(db.CONST_PORTFOLIO_SELF, db.CONST_SYMBOL_CASH, as_of)
        recon = (pos_gs + pos_gs_ira + pos_gs_hkd + pos_owe_portfolio) - (neg_cash_total + neg_cash_self)
        log.info("%s %s\t%s\t%s\t%s\t%s\t%s\t%s" % (as_of, recon, pos_gs, pos_gs_ira, pos_gs_hkd, pos_owe_portfolio, neg_cash_total, neg_cash_self))
        as_of -= timedelta(1)
    log.info("Completed")
        filter(db.Constituents.stock_id == db.Stocks.id).\
        filter(db.Constituents.portfolio_id == db.CONST_PORTFOLIO_PLAY).\
        order_by(db.Stocks.__table__.columns[col].desc()).\
        first().stocks
        # Biggest upward move for this column among PLAY-portfolio constituents.
        # NOTE(review): head of the enclosing function (where `col`, `table`,
        # `formats` are defined) is outside this chunk — indentation assumed.
        table.append( [ col + "_up", row.symbol, getattr(row, col), getattr(row, col + "_date" ) ] )
        # Same query ordered ascending: the biggest downward move.
        row = db.session.query(db.Stocks, db.Constituents).\
            filter(db.Constituents.stock_id == db.Stocks.id).\
            filter(db.Constituents.portfolio_id == db.CONST_PORTFOLIO_PLAY).\
            order_by(db.Stocks.__table__.columns[col].asc()).\
            first().stocks
        table.append( [ col + "_down", row.symbol, getattr(row, col), getattr(row, col + "_date" ) ] )
    rpt.add_table( table, formats )


def main():
    """Build the max-movers health-check report and e-mail it to self."""
    log.info("Started...")
    db = database2()
    rpt = report()
    populate_max_movers( db, rpt )
    subject = 'Health Check - ' + strftime("%Y-%m-%d", localtime())
    send_mail_html_self(subject, rpt.get_html())
    log.info("Completed")


if __name__ == '__main__':
    # Top-level boundary: log any failure and record the abort.
    try:
        main()
    except Exception as err:
        log.exception(err)
        log.info("Aborted")
    # NOTE(review): head of the enclosing function is outside this chunk;
    # `md` appears to be the existing market_data record (None when absent,
    # has an 'id' otherwise) and `l` the value being stored — confirm there.
    # POST creates a new record; PUT overwrites the existing one.
    if md == None:
        log.info("POST market_data: %s, %f" % (i['symbol'], l))
        post_market_data(i['id'], l)
    else:
        log.info("PUT market_data: %s, %f" % (i['symbol'], l))
        put_market_data(md['id'], i['id'], l)


def main():
    """Load market data for every focus-list instrument."""
    log.info("Started loading market data...")
    log.info("Loading focus names...")
    instruments_focus = ref_data_focus()
    #instruments = ref_data()
    for i in instruments_focus:
        populate_market_data(i)
        #instruments = [x for x in instruments if x['symbolAlphaVantage'] != i['symbolAlphaVantage']]
    #log.info("Loading blurry names...")
    #for i in instruments:
        #populate_market_data(i)
    log.info("Completed")


if __name__ == '__main__':
    # Top-level boundary: log any failure and record the abort.
    try:
        main()
    except Exception as err:
        log.exception(err)
        log.info("Aborted")
def main():
    """Download current positions for each downloadable bank via OFX and
    write them into the constituents table.

    For every row in accounts_types with download=true: fetch the OFX
    statement, parse it, and update constituents.value for each mapped
    account.  A failure for one bank is logged and does not stop the others.
    """
    log.info("Started...")
    conn = connect( config_database_connect )
    cur = conn.cursor(cursor_factory=DictCursor)
    cur.execute("select * from accounts_types where download=true")
    banks = cur.fetchall()
    for bank in banks:
        try:
            log.info("Downloading: " + " " + bank['description'])
            GlobalConfig = OfxConfig()
            a = GlobalConfig.account(bank['id'])
            ofxdata = a.download(days=0)
            # Round-trip through a temp file so OfxParser reads a real file
            # object (and the raw download is left on disk for debugging).
            tmp_path = path.expanduser('~/tmp/ofxdata.tmp')
            with open(tmp_path, 'w') as f:
                f.write(ofxdata.read())
            with open(tmp_path, 'r') as f:
                parsed = OfxParser.parse(f)
            log.info("OfxParser complete")
            positions = {}
            for pos in parsed.account.statement.positions:
                positions[pos.security] = round(pos.units * pos.unit_price, 2)
                log.info("Downloaded: " + str(bank['description']) + " " + str(pos.security))
            # Parameterized queries: the old string-concatenated UPDATE was
            # missing the space before "where" (broken SQL) and was open to
            # injection via symbol names.
            cur.execute("select * from accounts where type=%s", (bank['type'],))
            accounts = cur.fetchall()
            for account in accounts:
                if account['name'] not in positions:
                    raise Exception('account ' + account['name'] + ' not present in download')
                log.info( bank['description'] + '\t' + account['name_local'] + '\t' + str(positions[account['name']]) )
                cur.execute(
                    "update constituents set value=%s where symbol=%s",
                    (positions[account['name']], account['name_local']))
                conn.commit()
                log.info("Set: " + str(account['name_local']))
        except Exception as err:
            log.exception(err)
            log.error("Failed loading for bank: " + bank['description'])
    # Close the db
    cur.close()
    conn.close()
    log.info("Completed")