def calc_single_security_beta(symbol, months):
    """Return the beta of `symbol` against the configured benchmark.

    Beta = Cov(benchmark, symbol) / Var(benchmark), computed over the last
    `months` monthly returns using sample statistics (ddof=1).

    Args:
        symbol: Ticker whose beta is being measured.
        months: Number of months of return history to use.

    Returns:
        float: the computed beta.
    """
    benchmark = get_benchmark()
    print("Calculating %s month beta for %s against %s" %
          (str(months), symbol, benchmark))
    # Both series must cover the same date range or the covariance is
    # meaningless; Io.error aborts on mismatch.
    if not verify_date_sync(symbol, months):
        Io.error("Date range mismatch for %s & %s" % (symbol, benchmark))
    benchmark_returns = get_security_returns_list(benchmark, months)
    symbol_returns = get_security_returns_list(symbol, months)
    # Off-diagonal element of the 2x2 sample covariance matrix.
    covar = numpy.cov(benchmark_returns, symbol_returns, ddof=1)[0][1]
    benchmark_var = numpy.var(benchmark_returns, ddof=1)
    # (Removed a large block of commented-out debug prints/prompts.)
    return (covar / benchmark_var)
def archive_ledger():
    """Move the current (or prior) ledger into the archive directory.

    Also purges archives older than the configured retention window.
    Returns False when no ledger file exists; otherwise returns the path
    the ledger was archived to. Retries the move (with user confirmation)
    on PermissionError, e.g. when the workbook is open in Excel.
    """
    ledger_path = ReadLedger.get_ledger_path()
    if not os.path.isfile(ledger_path):
        ledger_path = ReadLedger.get_ledger_path(prior=True)
    if not os.path.isfile(ledger_path):
        return (False)
    archive_dir = Config.get_path("ledger_archive_dir")
    # Archive name: original basename + timestamp, always .xlsx.
    base_name = os.path.splitext(os.path.split(ledger_path)[1])[0]
    stamp = datetime.now().strftime("%Y-%m-%d-%H-%M-%S")
    target_path = os.path.join(archive_dir, base_name + "-" + stamp + ".xlsx")
    # Purge archives older than the retention window; filenames are
    # compared on their leading YYYY-MM prefix.
    retention_months = Config.get_ledger("archive_months")
    cutoff = (datetime.now().date() -
              relativedelta(months=retention_months)).strftime("%Y-%m")
    for archived in os.listdir(archive_dir):
        if archived[0:7] < cutoff:
            print("Removing archive %s from %s" % (archived, archive_dir))
            os.remove(os.path.join(archive_dir, archived))
    while True:
        try:
            shutil.move(ledger_path, target_path)
            break
        except PermissionError:
            if not Io.yes_no(
                    "Failed to archive existing ledger. It might be open. Try again?"
            ):
                Io.error("Aborting.")
    return (target_path)
def get_accounts():
    """Load all accounts from the accounts CSV, grouped by account type.

    Creates an empty accounts CSV if needed, aborts via Io.error when it
    contains no accounts, and returns a Concepts.LedgerAccountCollection
    of checking accounts, savings accounts, and credit cards.
    """
    column_defs = [
        Csv.CsvColumn(name="Account Name", is_row_identifier=True),
        Csv.CsvColumn(name="Institution"),
        Csv.CsvColumn(name="Account Type"),
    ]
    csv_path = Config.get_path("accounts_csv")
    Csv.write_empty(csv_path, column_defs)
    accounts_csv = Csv.CsvFile(csv_path,
                               default_column_list=column_defs,
                               default_data_rows=[])
    if len(accounts_csv.return_all()) == 0:
        Io.error("%s does not contain any accounts!" % csv_path)
    return (Concepts.LedgerAccountCollection(
        checking_accounts=get_accounts_by_type(accounts_csv, "Checking"),
        savings_accounts=get_accounts_by_type(accounts_csv, "Savings"),
        credit_cards=get_accounts_by_type(accounts_csv, "Credit Card")))
def get_symbol_max(symbol):
    """Return how many consecutive month-start records exist for `symbol`.

    Walks the symbol's history table backwards from the most recent
    expected month-start date; a gap ends the streak. Records with the
    sentinel adj_close of -9001 ("no data") are excluded by the query.

    Aborts via Io.error when there is no history, when the latest record
    is not the expected date, or when fewer than two usable dates exist.
    """
    db_conn = sqlite3.connect(Config.get_path("history_db"))
    db_conn.isolation_level = None
    # FIX: close the connection even if the query raises.
    try:
        cursor = db_conn.cursor()
        # NOTE: table names cannot be bound as parameters in SQLite, so
        # interpolation is required; `symbol` comes from trusted config.
        symbol_dates = [
            record[0] for record in cursor.execute(
                "SELECT * FROM '%s' where adj_close != -9001;" %
                (str(symbol))).fetchall()
        ]
    finally:
        db_conn.close()
    # FIX: guard the empty table case before indexing [-1].
    if len(symbol_dates) == 0:
        Io.error("No usable date range for %s" % symbol)
    current_date = datetime.now()
    # On the 1st there is no data for the new month yet; use the prior one.
    if current_date.day == 1:
        current_date = current_date - relativedelta(months=1)
    last_date = current_date.replace(day=1).strftime("%Y-%m-%d")
    if symbol_dates[-1] != last_date:
        Io.error("Last date for %s should be %s but is %s" %
                 (symbol, last_date, symbol_dates[-1]))
    usable_dates = []
    for date in symbol_dates[::-1]:
        if date != last_date:
            break
        usable_dates.append(date)
        last_date = (datetime.strptime(last_date, "%Y-%m-%d") -
                     relativedelta(months=1)).strftime("%Y-%m-%d")
    # Need at least two months to compute returns.
    if len(usable_dates) < 2:
        Io.error("No usable date range for %s" % symbol)
    return (len(usable_dates))
def sync_stock_history(positions):
    """Backfill missing monthly close records for each position.

    For every position, finds the dates absent from the history DB and
    fetches an adjusted close from iexcloud or Yahoo, depending on the
    "use_yahoo" config flag. Dates with no data are stored with the
    sentinel close of -9001 so they are never re-requested.
    """
    iexcloud_api_key = Iexcloud.get_api_key()
    dates = get_dates_list()
    history_db = get_history_db_cursor()
    # Config is invariant for the whole run; read it once.
    use_yahoo = Config.get_yahoo("use_yahoo")
    for position in positions:
        print("Processing database entries for %s" % position.symbol)
        missing_dates = get_missing_dates(history_db, position.symbol, dates)
        for date in missing_dates:
            if use_yahoo == "False":
                symbol = position.symbol
                api_request = Iexcloud.get_api_request(
                    symbol, date, api_key=iexcloud_api_key)
                response = Api.make_api_request(api_request)
            elif use_yahoo == "True":
                # FIX: bind api_request here too; it was previously unbound
                # on this path and the error below raised NameError.
                api_request = "Yahoo %s %s" % (position.symbol, date)
                response = Yahoo.request(position.symbol, date)
            else:
                # FIX: previously fell through with `response` unbound.
                Io.error("Unexpected use_yahoo config value %s" % use_yahoo)
            if len(response) > 1:
                Io.error("Expected 1 result from request %s, got %s" %
                         (api_request, str(len(response))))
            elif len(response) == 0:
                #we set the close to -9001 so we don't keep making api calls for dates before the symbol existed
                print("No data for %s on %s" % (position.symbol, date))
                adj_close = -9001
            else:
                adj_close = (response[0])["close"]
            print("Adding record for %s on %s: %s" %
                  (position.symbol, date, adj_close))
            # FIX: bind values as parameters (table names can't be bound).
            history_db.execute(
                "INSERT INTO '%s' VALUES (?,?)" % (position.symbol),
                (date, str(adj_close)))
def make_api_request(api_request, stop_on_err=None):
    """GET `api_request` and return the decoded JSON body.

    Args:
        api_request: Full request URL.
        stop_on_err: When exactly False, a non-2xx response returns None
            instead of aborting. Any other value (including the default
            None) aborts via Io.error.

    Returns:
        Parsed JSON on a 2xx response; None on error when stop_on_err is
        False.
    """
    response = requests.get(api_request)
    status = str(response.status_code)
    if status.startswith("2"):
        return (response.json())
    # Non-2xx: either soft-fail or abort.
    if stop_on_err is False:  # FIX: identity comparison, not `== False`
        return (None)
    Io.error("Api request %s returned %s error" % (api_request, status))
def is_prod(api_key):
    """Report whether an iexcloud API key is a production key.

    Sandbox keys start with "Tsk"; production keys start with "pk_".
    An empty or unrecognized key aborts via Io.error.
    """
    if api_key.startswith("pk_"):
        return (True)
    if api_key.startswith("Tsk"):
        return (False)
    if api_key == "":
        Io.error("No iexcloud API key provided in config.")
    Io.error("Unknown api key %s" % api_key)
def get_custom_cells():
    """Collect cash, asset, and liability rows from the ledger Overview sheet.

    Reads the "Overview" tab of whichever ledger exists (current month
    preferred over prior), pulls the cash balance from cell B8, then scans
    the asset (cols F/G) and liability (cols I/J) columns from row 3 down.
    Misc assets come from a separate file via MiscAssets.get_sum().

    Returns:
        [custom_cells, assets, liabilities] — three lists of
        [description, amount] pairs.
        NOTE(review): the two early returns produce a single flat list
        ([["Cash", 0.0]]) rather than the three-list shape of the normal
        return — confirm callers handle both shapes.
    """
    custom_cells = []
    assets = []
    liabilities = []
    #copied from zm_ledger.load_ledger_entries()
    #lazy, lazy
    current_ledger = ReadLedger.get_ledger_path()
    prior_ledger = ReadLedger.get_ledger_path(prior=True)
    # Both ledgers existing at once means the prior month was never
    # archived; refuse to guess which one is authoritative.
    if os.path.isfile(current_ledger) and os.path.isfile(prior_ledger):
        Io.error(
            "Current and prior ledgers both exist. Possibly unarchived prior ledger. Aborting."
        )
    if not os.path.isfile(current_ledger) and not os.path.isfile(prior_ledger):
        print("No existing ledgers found. Writing empty ledger.")
        return ([["Cash", round(float(0), 2)]])
    if os.path.isfile(current_ledger):
        print("Loading current month ledger overview.")
        ledger = current_ledger
    if os.path.isfile(prior_ledger):
        print("Loading prior month ledger overview.")
        ledger = prior_ledger
    xl_workbook = openpyxl.load_workbook(ledger)
    try:
        xl_worksheet = xl_workbook["Overview"]
    except KeyError:
        print("No overview sheet found.")
        return ([["Cash", round(float(0), 2)]])
    # Cash balance lives at a fixed cell (row 8, col B) on the Overview tab.
    cash_value = (xl_worksheet.cell(row=8, column=2)).value
    try:
        cash_value = round(float(cash_value), 2)
    except (TypeError, ValueError):
        # Blank/non-numeric cell: treat as zero cash.
        cash_value = round(float(0), 2)
    custom_cells.append(["Cash", cash_value])
    # Data rows start at row 3: assets in cols F/G, liabilities in I/J.
    for row in range(3, xl_worksheet.max_row + 1):
        blanks = [None, ""]
        # "Misc Assets" is re-derived below from its own file, so the
        # sheet's copy is skipped to avoid double counting.
        skip_assets = ["Misc Assets"]
        asset_desc = (xl_worksheet.cell(row=row, column=6)).value
        asset_amt = (xl_worksheet.cell(row=row, column=7)).value
        liab_desc = (xl_worksheet.cell(row=row, column=9)).value
        liab_amt = (xl_worksheet.cell(row=row, column=10)).value
        if not asset_desc in blanks or not asset_amt in blanks:
            if not asset_desc in skip_assets:
                assets.append([asset_desc, round(float(asset_amt), 2)])
        if not liab_desc in blanks or not liab_amt in blanks:
            liabilities.append([liab_desc, round(float(liab_amt), 2)])
    #add in misc assets from separate file
    assets.append(["Misc Assets", round(float(MiscAssets.get_sum()), 2)])
    return ([custom_cells, assets, liabilities])
def download(symbol):
    """Download the exchangerates.org.uk history page for `symbol`.

    Skips the download (returning None) when a cached copy already
    exists; returns True after a successful fetch; aborts via Io.error
    on an HTTP error.
    """
    page_url = (r"https://www.exchangerates.org.uk/commodities/%s-history.html"
                % symbol.upper())
    cached_file = get_filepath(symbol)
    if os.path.isfile(cached_file):
        return None
    try:
        urllib.request.urlretrieve(page_url, cached_file)
        return (True)
    except urllib.error.HTTPError:
        Io.error("Failed to download history for %s" % (symbol))
def get_adj_close(path, date):
    """Look up the "Adj Close" recorded for `date` in the CSV at `path`.

    Returns the stored value on a unique match, None when the date is
    absent, and aborts via Io.error on multiple matches.
    """
    layout = [Csv.CsvColumn(name="Date"), Csv.CsvColumn(name="Adj Close")]
    history = Csv.CsvFile(path, layout, [])
    matches = history.find_by_dict({"Date": date})
    if len(matches) == 0:
        return (None)
    if len(matches) == 1:
        return (matches[0]["Adj Close"])
    Io.error("Expected 0 or 1 results, got %s for query %s in %s." %
             (str(len(matches)), str(date), str(path)))
def add_column(self, column_to_add):
    """Append `column_to_add` to this column map after validation.

    Aborts via zr_io.error when the new column would be a second row
    identifier or duplicates an existing column's name.

    NOTE(review): as written, both checks only run when the new column is
    itself a row identifier; duplicate names among non-identifier columns
    are not detected here — confirm that is intended.
    """
    if column_to_add.is_row_identifier:
        for existing_column in self.columns:
            if existing_column.is_row_identifier:
                zr_io.error("Tried to add %s as row identifier, but %s already exists as row identifier." % (column_to_add.name, existing_column.name))
            if existing_column.name == column_to_add.name:
                zr_io.error("Tried to add %s to column map, but it already exists." % column_to_add.name)
    self.columns.append(column_to_add)
def main():
    """Build the list of held metals with live prices.

    Reads holdings from the metals CSV, prices them via the metals API
    live endpoint, and falls back to Yahoo futures quotes when the API is
    unavailable or reports failure.

    Returns:
        list of Instruments.Metal, one per metal with Quantity > 0.
    """
    columns = [
        Csv.CsvColumn("Symbol", is_row_identifier=True),
        Csv.CsvColumn("Description"),
        Csv.CsvColumn("Quantity", data_type="float"),
        Csv.CsvColumn("Multiplier", data_type="float")
    ]
    default_data = [["XAG-USD", "Silver, Troy Ounce", 0, .9],
                    ["XAU-USD", "Gold, Troy Ounce", 0, .9]]
    metals_csv = Csv.CsvFile(Config.get_path("metals_csv"),
                             default_column_list=columns,
                             default_data_rows=default_data,
                             stop_on_write=False,
                             read_only=False)
    metals = []
    symbols = []
    for metal in metals_csv.return_all():
        if metal["Quantity"] > 0:
            metals.append(metal)
            # API wants bare metal codes, e.g. "XAG" not "XAG-USD".
            symbols.append((metal["Symbol"]).replace("-USD", ""))
    symbol_string = ",".join(symbols)
    live_prices_request = MetalsApi.get_live_request(symbol_string)
    live_prices = Api.make_api_request(live_prices_request, stop_on_err=False)
    if live_prices is not None and live_prices["success"] == False:
        print("Falling back to Yahoo futures for live metal prices.")
        live_prices = None
    # Yahoo futures tickers for the fallback path.
    yahoo_fallbacks = {"XAG-USD": "SI=F", "XAU-USD": "GC=F"}
    return_metals = []
    for metal in metals:
        if live_prices is not None:
            # The API returns USD->metal rates; invert for USD price.
            last_price = float(1 / float(
                live_prices["rates"][metal["Symbol"].replace("-USD", "")]))
        else:
            # BUG FIX: gold (XAU-USD) is a default holding but previously
            # had no fallback ticker, so the fallback path aborted.
            if metal["Symbol"] not in yahoo_fallbacks:
                Io.error("Unknown metal symbol %s" % metal["Symbol"])
            last_price = Yahoo.get_live_price(yahoo_fallbacks[metal["Symbol"]])
        return_metals.append(
            Instruments.Metal(metal["Symbol"],
                              metal["Description"],
                              metal["Quantity"],
                              metal["Multiplier"],
                              last_price=last_price))
    return (return_metals)
def get_treasuries():
    """Scrape current values for saved treasury bonds.

    Reads the saved TreasuryDirect calculator HTML, extracts its hidden
    form variables, re-submits them to the live calculator, and parses
    the returned bond rows.

    Returns:
        list of Instruments.TreasuryBond; empty when the source file is
        missing or has no variables section.
    """
    treasuries = []
    treasuries_source = zr_config.get_path("treasuries")
    if not os.path.isfile(treasuries_source):
        print("Treasuries file not found.")
    else:
        print("Processing %s" % treasuries_source)
        # Raw strings avoid invalid-escape-sequence warnings in regexes.
        re_variables_section = re.compile(
            r"\<\!\-\- Hidden Variables \-\-\>.*\<\!\-\- Hidden Variables \-\-\>",
            re.DOTALL)
        with open(treasuries_source, "r") as f:
            treasuries_file_contents = f.read()
        variables_section = re_variables_section.search(
            treasuries_file_contents)
        if not variables_section:
            print("Variables section not found in treasuries file.")
        #process treasuries file
        else:
            #get variables from treasuries file
            variables = []
            variables_section = variables_section.group(0)
            re_variable = re.compile(r"\<input type.*?\>")
            for variable in re_variable.findall(variables_section):
                variables.append(zt_html.SiteTreasuryBond(variable))
            #open treasury site and submit variables
            browser = mechanicalsoup.StatefulBrowser()
            for _attempt in range(10):
                try:
                    sleep(1)
                    browser.open("https://www.treasurydirect.gov/BC/SBCPrice")
                    break
                except Exception:
                    print("Treasury site not responding. Retrying.")
            else:
                # BUG FIX: the old loop's abort condition (try_count == 10
                # inside `while try_count < 10`) was unreachable, so it
                # could exhaust retries and submit to an unopened page.
                Io.error("Treasury site not responding. Aborting.")
            browser.select_form(
                'form[action="https://www.treasurydirect.gov/BC/SBCPrice"]')
            for variable in variables:
                browser[variable.name] = variable.value
            treasuries_return = browser.submit_selected().text
            #extract bond data returned from treasuries site
            re_bond_data = re.compile(
                r"\<td class\=\"lft\".*?\<\/strong\>\<\/td>", re.DOTALL)
            for bond in re_bond_data.findall(treasuries_return):
                treasuries.append(Instruments.TreasuryBond(bond))
    return (treasuries)
def get_balance(coin_type, address):
    """Return the balance of `address` for `coin_type` in whole coins.

    Queries the blockcypher balance endpoint, which reports amounts in
    the chain's smallest unit, and converts for the known denominations
    (satoshi-style for btc/ltc, wei for eth). Unknown coin types return
    the raw base-unit amount. Aborts via Io.error on a non-2xx response.
    """
    print("Checking %s address %s" % (coin_type, address))
    api_request = r"https://api.blockcypher.com/v1/%s/main/addrs/%s/balance" % (
        coin_type, address)
    response = requests.get(api_request)
    status = str(response.status_code)
    if status.startswith("2"):
        base = response.json()["balance"]
        # Smallest-unit divisors per chain.
        divisors = {
            "ltc": 100000000,
            "btc": 100000000,
            "eth": 1000000000000000000,
        }
        if coin_type in divisors:
            return (base / divisors[coin_type])
        return (base)
    else:
        Io.error("Api request %s returned %s error" % (api_request, status))
def request(symbol, date):
    """Return [{"close": value}] for `symbol` on `date`, or [] if no data.

    Always forces a fresh download into a "retry-" prefixed cache file
    (adj_close starts as "null", so the retry branch is taken on every
    call), falling back to iexcloud when the Yahoo download fails.

    NOTE(review): control-flow reconstruction — original indentation was
    lost. Also, `return_empty` appears to be unbound on some paths (e.g.
    a successful iexcloud fallback with a nonzero close never sets it,
    which would raise NameError at the bottom) — confirm against the
    original file.
    """
    csv_path = get_filepath(symbol)
    # Normalize to the nearest actual trading date.
    date = Calendar.get_trading_date(date)
    adj_close = "null"
    if adj_close != None:
        if "null" in adj_close:
            # Re-download into a separate retry cache file.
            csv_path = get_filepath(symbol, prepend="retry-")
            if download(symbol, date, csv_path) == False:
                print(
                    "Failed to get data from yahoo. Falling back to iexcloud.")
                iexcloud_request = Iexcloud.get_api_request(symbol, date)
                iexcloud_response = Api.make_api_request(iexcloud_request)
                if len(iexcloud_response) == 0:
                    return_empty = True
                elif len(iexcloud_response) == 1:
                    adj_close = str(iexcloud_response[0]["close"])
                    try:
                        # A zero close is treated as "no data".
                        if float(adj_close) == 0:
                            return_empty = True
                    except (TypeError, ValueError):
                        if str(adj_close) == "":
                            return_empty = True
            else:
                # Download succeeded: read the close from the retry cache.
                adj_close = get_adj_close(csv_path, date)
                return_empty = False
    # Final normalization: missing or "null" values mean "no data".
    if adj_close == None:
        return_empty = True
    elif "null" in adj_close:
        return_empty = True
    else:
        try:
            adj_close = float(adj_close)
        except (TypeError, ValueError):
            Io.error("Unanticipated value %s in adj_close for %s" %
                     (str(adj_close), symbol))
    if return_empty == True:
        return ([])
    elif return_empty == False:
        return ([{"close": adj_close}])
def get_close(symbol, date):
    """Return the closing value for `symbol` on `date` ("YYYY-MM-DD").

    Parses the locally cached exchangerates.org.uk history table for the
    symbol, downloading the page first when no cache exists. Aborts via
    Io.error when the cell value cannot be converted to float.
    """
    months = [
        "January", "February", "March", "April", "May", "June", "July",
        "August", "September", "October", "November", "December"
    ]
    year = date[:4]
    month = months[int(date[5:7]) - 1]
    day = str(int(date[8:10]))
    # Site rows look like " 7 January 2021": leading space, unpadded day.
    row_date = " %s %s %s" % (day, month, year)
    file_path = get_filepath(symbol)
    if not os.path.isfile(file_path):
        download(symbol)
    er_html = lxml_html.parse(file_path)
    table = (Html.get_by_xpath(er_html,
                               ".//table[@id='hist']",
                               min_results=1,
                               max_results=1,
                               description="History Table"))[0]
    wanted_row = Html.get_by_xpath(table,
                                   ".//td[contains(text(), '%s')]/.." %
                                   row_date,
                                   min_results=1,
                                   max_results=1,
                                   description="Row for %s" % date)[0]
    # The closing value is the third of the row's four cells.
    value_cell = Html.get_by_xpath(wanted_row,
                                   ".//td",
                                   min_results=4,
                                   max_results=4,
                                   description="Cells for row %s" % date)[2]
    try:
        return (float(value_cell.text))
    except (TypeError, ValueError):
        # BUG FIX: the old handler referenced an undefined name `close`,
        # raising NameError while formatting this message; bare except
        # narrowed to the conversion errors float() can raise.
        Io.error("Failed to convert closing value %s for %s on %s to float." %
                 (str(value_cell.text), symbol, date))
def __init__(self, name, data_type = None, is_row_identifier = None, overwrite_with_default = None):
    """Describe one CSV column.

    Args:
        name: Column header; must be non-empty for row identifiers.
        data_type: One of "str", "int", "float", "bool"; defaults to "str".
        is_row_identifier: Whether this column uniquely identifies rows.
        overwrite_with_default: Whether file values are replaced by the
            defaults; not allowed for row identifiers.

    Aborts via zr_io.error on invalid combinations.
    """
    # None sentinels are kept so an explicit None behaves like the default.
    if data_type is None:  # FIX: identity comparison, not `== None`
        data_type = "str"
    if is_row_identifier is None:
        is_row_identifier = False
    if overwrite_with_default is None:
        overwrite_with_default = False
    # Validate before assigning any state.
    if data_type not in ["str", "int", "float", "bool"]:
        zr_io.error("Unknown data type %s" % data_type)
    if is_row_identifier:
        if name == "":
            zr_io.error("Unnamed column cannot be row identifier.")
        if overwrite_with_default:
            zr_io.error("Cannot have overwrite_with_default set to True for row identifiers.")
    self.name = name
    self.is_row_identifier = is_row_identifier
    self.data_type = data_type
    self.overwrite_with_default = overwrite_with_default
    # Positions are assigned later while building/merging column maps.
    self.default_position = None
    self.file_position = None
    self.final_position = None
def __init__(self, path, default_column_list = None, default_data_rows = None, stop_on_write = None, read_only = None):
    """Load (or create) a CSV file, merging file contents with defaults.

    Args:
        path: CSV file location; created from defaults when absent.
        default_column_list: list of CsvColumn describing the schema.
        default_data_rows: list of row lists used when the file is new.
        stop_on_write: passed through to write_file when the file is
            (re)written; defaults to True.
        read_only: defaults to True; writable files require both default
            columns and default rows, and a row-identifier column.
    """
    # None sentinels so explicit None behaves like the default.
    if default_column_list == None:
        default_column_list = []
    if default_data_rows == None:
        default_data_rows = []
    if stop_on_write == None:
        stop_on_write = True
    if read_only == None:
        read_only = True
    self.read_only = read_only
    # A writable file needs full defaults; otherwise degrade to read-only.
    if not self.read_only:
        if len(default_column_list) == 0:
            print("Empty default column list. Setting read-only to True.")
            self.read_only = True
        if len(default_data_rows) == 0:
            print("No default data rows provided. Setting read-only to True.")
            self.read_only = True
    #check validity of supplied defaults
    if len(default_data_rows):
        for row in default_data_rows:
            if type(row) != type([]):
                zr_io.error("Default data row should be a list of list, but contains %s" % (type(row)))
    if len(default_column_list):
        test_column = CsvColumn("test")
        for default_column in default_column_list:
            if type(default_column) != type(test_column):
                zr_io.error("Default column list should contain CsvColumn objects, but contains %s" % type(default_column))
        del test_column
    default_column_map = CsvColumnMap(default_column_list)
    # NOTE: checks the original read_only argument, not self.read_only.
    if not read_only:
        if not default_column_map.get_identifier():
            zr_io.error("No default columns marked as row identifier.")
    # Record each default column's position in the defaults ordering.
    col_counter = 0
    for column in default_column_map.columns:
        column.default_position = col_counter
        col_counter += 1
    self.set_types(default_data_rows, default_column_map)
    self.write_file_flag = False
    if os.path.isfile(path):
        # Existing file: read it and merge with the defaults.
        file_data_rows = []
        file_data_rows = self.csv_to_list(path)
        file_column_map = self.map_file_columns(file_data_rows, default_column_map)
        #delete the header row now that we've got what we need from it
        if len(file_data_rows):
            del file_data_rows[0]
        self.set_types(file_data_rows, file_column_map)
        # Record each column's position as found in the file.
        col_counter = 0
        for column in file_column_map.columns:
            column.file_position = col_counter
            col_counter += 1
        self.column_map = self.combine_column_maps(default_column_map, file_column_map)
        self.data = self.combine_data_rows(default_data_rows, file_data_rows)
    #if the file doesn't already exist
    else:
        self.column_map = default_column_map
        self.write_file_flag = True
        self.data = default_data_rows
    #finalize/normalize the info we're holding
    col_counter = 0
    for column in self.column_map.columns:
        column.final_position = col_counter
        col_counter += 1
    if self.write_file_flag:
        self.write_file(path, stop_on_write)
def load_ledger_entries(account):
    """Populate `account` with entries/balances from the existing ledger.

    Loads from the current month's ledger when present, otherwise from
    the prior month's ledger ("roll forward" mode). In roll-forward mode
    the prior month's entries are collapsed into an opening balance and a
    reconciliation (book vs. bank) is enforced before proceeding.

    Mutates `account` in place (book_balance, book_entries,
    book_adjustments, bank_adjustments) and aborts via Io.error on
    inconsistent state.
    """
    current_ledger = get_ledger_path()
    prior_ledger = get_ledger_path(prior = True)
    # Both ledgers at once means the prior month was never archived.
    if os.path.isfile(current_ledger) and os.path.isfile(prior_ledger):
        Io.error("Current and prior ledgers both exist. Possibly unarchived prior ledger. Aborting.")
    if not os.path.isfile(current_ledger) and not os.path.isfile(prior_ledger):
        print("No existing ledgers found. Writing empty ledger.")
        return(account)
    roll_forward = False
    if os.path.isfile(current_ledger):
        print("Loading current month ledger for %s." % account.name)
        ledger = current_ledger
    if os.path.isfile(prior_ledger):
        print("Loading prior month ledger for %s." % account.name)
        ledger = prior_ledger
        roll_forward = True
    xl_workbook = openpyxl.load_workbook(ledger)
    #get book entries
    # Credit cards share one tab; other accounts use their institution's.
    if account.account_type == "Credit Card":
        book_tab = "Credit Cards"
    else:
        book_tab = account.institution
    rec_tab = account.name + " - Rec"
    load_rec_info(xl_workbook, account, rec_tab)
    book_entries = get_entries(xl_workbook, book_tab, account.name)
    book_adjustments = get_entries(xl_workbook, rec_tab, "Book Adjustments")
    bank_adjustments = get_entries(xl_workbook, rec_tab, "Bank Adjustments")
    if roll_forward == False:
        # Same-month reload: the first entry is the opening balance row.
        balance_entry = book_entries[0]
        account.book_balance = float(balance_entry.amount)
        book_entries.remove(balance_entry)
        account.book_adjustments = book_adjustments
    else:
        # Roll forward: collapse last month's entries into one balance.
        balance = float(0)
        for entry in book_entries:
            balance += entry.amount
        account.book_balance = balance
        book_entries = []
        # Adjustments dated this month carry over as regular entries.
        for adjustment in book_adjustments:
            if adjustment.date.year == datetime.now().year and adjustment.date.month == datetime.now().month:
                book_entries.append(adjustment)
        for entry in book_entries:
            book_adjustments.remove(entry)
    account.book_entries = book_entries
    account.book_adjustments = book_adjustments
    account.bank_adjustments = bank_adjustments
    if roll_forward == True:
        # Reconciliation is only valid on the day the roll forward runs.
        if account.rec_date.date() != datetime.now().date():
            print(account.rec_date.date())
            print(datetime.now().date())
            Io.error("Bad reconciliation date. "
                     "Reconciliation must be done on day of roll forward.")
        # Verify book balance + adjustments equals bank balance + adjustments.
        book_rec_bal = float(account.book_balance)
        bank_rec_bal = float(account.bank_balance)
        for entry in account.book_entries:
            book_rec_bal += float(entry.amount)
        for entry in account.book_adjustments:
            book_rec_bal += float(entry.amount)
        for entry in account.bank_adjustments:
            bank_rec_bal += float(entry.amount)
        book_rec_bal = round(book_rec_bal, 2)
        bank_rec_bal = round(bank_rec_bal, 2)
        if bank_rec_bal != book_rec_bal:
            Io.error("Reconciliation failed. Accounts must be reconciled before rolling forward.")