def main():
    """Collect crypto holdings and return only those with a non-zero balance."""
    ltc = Instruments.CryptoCurrency(
        "ltc",
        get_used_electrum_addresses(Config.get_path("electrum_ltc_wallets"),
                                    "ltc"))
    btc = Instruments.CryptoCurrency(
        "btc",
        get_used_electrum_addresses(Config.get_path("electrum_btc_wallets"),
                                    "bc"))
    eth = Instruments.CryptoCurrency(
        "eth", get_mycrypto_addresses(Config.get_path("mycrypto_eth_wallets")))
    holdings = [ltc, btc, eth]
    for coin in holdings:
        for addr in coin.addresses:
            coin.add_balance(get_balance(coin.currency, addr))
            # brief pause between lookups to avoid hammering the balance API
            sleep(.5)
    # only report currencies we actually hold
    return [coin for coin in holdings if coin.balance > 0]
def archive_ledger():
    """Move the current (or prior month's) ledger into the archive directory.

    Returns the archived file's destination path, or False when no ledger
    file exists. Also purges archived ledgers older than the configured
    retention window ("archive_months").
    """
    source_path = ReadLedger.get_ledger_path()
    if not os.path.isfile(source_path):
        # fall back to last month's ledger if this month's doesn't exist yet
        source_path = ReadLedger.get_ledger_path(prior=True)
        if not os.path.isfile(source_path):
            return (False)
    archive_dir = Config.get_path("ledger_archive_dir")
    destination_name = os.path.split(source_path)[1]
    # timestamp the archived copy so repeated archives never collide
    destination_name = os.path.splitext(destination_name)[
        0] + "-" + datetime.now().strftime("%Y-%m-%d-%H-%M-%S") + ".xlsx"
    destination_path = os.path.join(archive_dir, destination_name)
    #purge old ledgers
    archive_months = Config.get_ledger("archive_months")
    earliest_date = (datetime.now().date() -
                     relativedelta(months=archive_months))
    earliest_ledger = earliest_date.strftime("%Y-%m")
    for i in os.listdir(archive_dir):
        # archive filenames start with "YYYY-MM", so a lexicographic
        # comparison of the first 7 chars orders them chronologically
        if i[0:7] < earliest_ledger:
            print("Removing archive %s from %s" % (i, archive_dir))
            os.remove(os.path.join(archive_dir, i))
    moved = False
    while not moved:
        try:
            shutil.move(source_path, destination_path)
            moved = True
        except PermissionError:
            # the ledger is likely open in Excel; offer a retry.
            # Io.error presumably aborts the program - TODO confirm,
            # otherwise this loop retries after "Aborting." is printed.
            if not Io.yes_no(
                    "Failed to archive existing ledger. It might be open. Try again?"
            ):
                Io.error("Aborting.")
    return (destination_path)
def get_sum():
    """Sum the "Estimated Value" column of the misc-assets CSV.

    Creates the CSV (header row only) when it does not exist yet.
    Returns the total as a float; unparseable cells are reported via
    Io.nostop and skipped.
    """
    misc_sum = float(0)
    misc_path = Config.get_path("misc_assets_csv")
    if not isfile(misc_path):
        with open(misc_path, "w") as f:
            f.write("Description,Serial No.,Estimated Value,Notes")
    default_column_map = [
        Csv.CsvColumn(name="Description",
                      data_type="str",
                      is_row_identifier=True),
        Csv.CsvColumn(name="Serial No.", data_type="str"),
        Csv.CsvColumn(name="Estimated Value", data_type="float"),
        Csv.CsvColumn(name="Notes", data_type="str")
    ]
    default_data_rows = []
    csv_file = Csv.CsvFile(misc_path,
                           default_column_map,
                           default_data_rows,
                           read_only=True)
    for i in csv_file.return_all():
        raw_value = i["Estimated Value"]
        try:
            misc_sum += float(raw_value)
        except ValueError:
            # BUG FIX: the old code referenced the post-conversion variable
            # inside the handler; it is unbound when float() raises, so the
            # error path itself raised NameError (or reported a stale value).
            Io.nostop("Error parsing %s as float number in %s" %
                      (str(raw_value), misc_path))
    return (misc_sum)
def get_filepath(symbol):
    """Return today's download path for *symbol*'s exchange-rate page."""
    target_dir = Config.get_path("ex_rates_dir")
    # create the download directory on first use
    if not os.path.isdir(target_dir):
        os.makedirs(target_dir)
    date_stamp = str(datetime.now().strftime("%Y-%m-%d"))
    filename = "%s-%s.htm" % (date_stamp, symbol)
    return os.path.join(target_dir, filename)
def get_symbol_max(symbol):
    """Count the consecutive month-start records available for *symbol*.

    Walks the symbol's history newest-to-oldest, counting strictly
    consecutive months back from the most recent expected month.
    Errors out (via Io.error) when the latest record is stale or when
    fewer than two usable months exist. Returns the count.
    """
    db_conn = sqlite3.connect(Config.get_path("history_db"))
    db_conn.isolation_level = None
    cursor = db_conn.cursor()
    symbol_dates = []
    # -9001 appears to be the sentinel for a missing adjusted close - TODO confirm
    for record in (cursor.execute(
            "SELECT * FROM '%s' where adj_close != -9001;" %
            (str(symbol))).fetchall()):
        symbol_dates.append(record[0])
    db_conn.close()
    current_date = datetime.now()
    # on the 1st of the month the current month's record may not exist
    # yet, so expect last month's record instead
    if current_date.day == 1:
        current_date = current_date - relativedelta(months=1)
    last_date = current_date.replace(day=1).strftime("%Y-%m-%d")
    if symbol_dates[-1] != last_date:
        Io.error("Last date for %s should be %s but is %s" %
                 (symbol, last_date, symbol_dates[-1]))
    usable_dates = []
    # iterate newest-to-oldest; stop at the first gap in the monthly series
    for date in symbol_dates[::-1]:
        if date == last_date:
            usable_dates.append(date)
            last_date = (datetime.strptime(last_date, "%Y-%m-%d") -
                         relativedelta(months=1)).strftime("%Y-%m-%d")
        else:
            break
    # need at least two months to compute a return/beta
    if len(usable_dates) == 0 or len(usable_dates) == 1:
        Io.error("No usable date range for %s" % symbol)
    return (len(usable_dates))
def get_accounts():
    """Read the accounts CSV and group rows into a LedgerAccountCollection."""
    columns = [
        Csv.CsvColumn(name="Account Name", is_row_identifier=True),
        Csv.CsvColumn(name="Institution"),
        Csv.CsvColumn(name="Account Type")
    ]
    csv_path = Config.get_path("accounts_csv")
    # make sure the file exists (headers only) before reading it
    Csv.write_empty(csv_path, columns)
    accounts_csv = Csv.CsvFile(csv_path,
                               default_column_list=columns,
                               default_data_rows=[])
    if len(accounts_csv.return_all()) == 0:
        Io.error("%s does not contain any accounts!" % csv_path)
    return (Concepts.LedgerAccountCollection(
        checking_accounts=get_accounts_by_type(accounts_csv, "Checking"),
        savings_accounts=get_accounts_by_type(accounts_csv, "Savings"),
        credit_cards=get_accounts_by_type(accounts_csv, "Credit Card")))
def get_ledger_path(prior=None):
    """Return the xlsx ledger path for this month (last month when prior is True)."""
    base_time = datetime.today()
    if prior == True:
        # step back one calendar month for the prior ledger
        base_time = base_time - relativedelta(months=1)
    filename = base_time.strftime("%Y-%m") + "-Ledger.xlsx"
    return (os.path.join(Config.get_path("money_dir"), filename))
def main(investments=None):
    """Write the Investments.xlsx report and return the investment list used."""
    if investments == None:
        investments = get_investments()
    report_path = os.path.join(Config.get_path("reports_dir"),
                               "Investments.xlsx")
    workbook = xlsxwriter.Workbook(report_path)
    write_worksheet(workbook, investments)
    Excel.save_workbook(workbook, report_path)
    return investments
def get_history_db_cursor(skip_purge_check=None):
    """Open the history database and hand back a cursor.

    Runs purge() on the cursor first unless skip_purge_check is True.
    Note: the underlying connection stays open for the caller to use.
    """
    connection = sqlite3.connect(Config.get_path("history_db"))
    connection.isolation_level = None  # autocommit mode
    cursor = connection.cursor()
    if skip_purge_check != True:
        purge(cursor)
    return cursor
def main():
    """Load precious-metal holdings and attach a live per-unit price.

    Prefers the metals API for spot prices; falls back to Yahoo futures
    quotes when the API request fails or reports success == False.
    Returns a list of Instruments.Metal for metals with positive quantity.
    """
    columns = [
        Csv.CsvColumn("Symbol", is_row_identifier=True),
        Csv.CsvColumn("Description"),
        Csv.CsvColumn("Quantity", data_type="float"),
        Csv.CsvColumn("Multiplier", data_type="float")
    ]
    default_data = [["XAG-USD", "Silver, Troy Ounce", 0, .9],
                    ["XAU-USD", "Gold, Troy Ounce", 0, .9]]
    metals_csv = Csv.CsvFile(Config.get_path("metals_csv"),
                             default_column_list=columns,
                             default_data_rows=default_data,
                             stop_on_write=False,
                             read_only=False)
    metals = []
    symbol_string = ""
    for metal in metals_csv.return_all():
        if metal["Quantity"] > 0:
            metals.append(metal)
            # the API wants bare comma-separated symbols, e.g. "XAG,XAU"
            symbol_string += (metal["Symbol"]).replace("-USD", "") + ","
    symbol_string = symbol_string.rstrip(",")
    live_prices_request = MetalsApi.get_live_request(symbol_string)
    live_prices = Api.make_api_request(live_prices_request, stop_on_err=False)
    if live_prices != None:
        if live_prices["success"] == False:
            print("Falling back to Yahoo futures for live metal prices.")
            live_prices = None
    return_metals = []
    for metal in metals:
        if live_prices != None:
            # API returns USD->metal rates, so invert for price per unit
            last_price = float(1 / float(
                live_prices["rates"][metal["Symbol"].replace("-USD", "")]))
        else:
            if metal["Symbol"] == "XAG-USD":
                symbol = "SI=F"  # silver futures
            # BUG FIX: gold previously fell through to the unknown-symbol
            # error even though XAU-USD is one of the default metals; map
            # it to the gold futures ticker for the Yahoo fallback.
            elif metal["Symbol"] == "XAU-USD":
                symbol = "GC=F"  # gold futures
            else:
                Io.error("Unknown metal symbol %s" % metal["Symbol"])
            last_price = Yahoo.get_live_price(symbol)
        new_metal = Instruments.Metal(metal["Symbol"],
                                      metal["Description"],
                                      metal["Quantity"],
                                      metal["Multiplier"],
                                      last_price=last_price)
        return_metals.append(new_metal)
    return (return_metals)
def get_treasuries():
    """Scrape current savings-bond values from the TreasuryDirect calculator.

    Reads a saved calculator HTML page for the hidden bond variables,
    re-submits them to the live calculator, and parses the returned table
    cells into Instruments.TreasuryBond objects. Returns a (possibly
    empty) list of bonds.
    """
    treasuries = []
    treasuries_source = zr_config.get_path("treasuries")
    if not os.path.isfile(treasuries_source):
        print("Treasuries file not found.")
    else:
        print("Processing %s" % treasuries_source)
        re_variables_section = re.compile(
            r"\<\!\-\- Hidden Variables \-\-\>.*\<\!\-\- Hidden Variables \-\-\>",
            re.DOTALL)
        treasuries_file_contents = ""
        with open(treasuries_source, "r") as f:
            for line in f:
                treasuries_file_contents += line
        variables_section = re_variables_section.search(
            treasuries_file_contents)
        if not variables_section:
            print("Variables section not found in treasuries file.")
        #process treasuries file
        else:
            #get variables from treasuries file
            variables = []
            variables_section = variables_section.group(0)
            re_variable = re.compile(r"\<input type.*?\>")
            for variable in re_variable.findall(variables_section):
                variables.append(zt_html.SiteTreasuryBond(variable))
            #open treasury site and submit variables
            browser = mechanicalsoup.StatefulBrowser()
            try_count = 0
            while try_count < 10:
                try:
                    sleep(1)
                    browser.open("https://www.treasurydirect.gov/BC/SBCPrice")
                    break
                except Exception:
                    try_count += 1
                    # BUG FIX: the abort check used to run before the
                    # increment and tested == 10, so it could never fire;
                    # after 10 failures the code fell through and tried to
                    # submit a form on a page that was never opened.
                    if try_count == 10:
                        Io.error("Treasury site not responding. Aborting.")
                    print("Treasury site not responding. Retrying.")
            browser.select_form(
                'form[action="https://www.treasurydirect.gov/BC/SBCPrice"]')
            for variable in variables:
                browser[variable.name] = variable.value
            treasuries_return = browser.submit_selected().text
            #extract bond data returned from treasuries site
            re_bond_data = re.compile(
                r"\<td class\=\"lft\".*?\<\/strong\>\<\/td>", re.DOTALL)
            for bond in re_bond_data.findall(treasuries_return):
                treasuries.append(Instruments.TreasuryBond(bond))
    return (treasuries)
def get_security_returns_list(symbol, months):
    """Return month-over-month percent returns for *symbol*, oldest first."""
    #build db connection
    db_conn = sqlite3.connect(Config.get_path("history_db"))
    db_conn.isolation_level = None
    cursor = db_conn.cursor()
    #pull one more month than we're calculating return for because we
    #need prior month's closing to calculate return for first month
    pull_months = months + 1
    rows = cursor.execute(
        "SELECT %s FROM '%s' where adj_close != -9001 ORDER BY date DESC LIMIT %s;"
        % ("adj_close", symbol, pull_months)).fetchall()
    db_conn.close()
    return_list = []
    previous_close = None
    # rows come back newest-first; reverse to walk chronologically
    for row in rows[::-1]:
        if previous_close:
            return_list.append(
                ((row[0] - previous_close) / previous_close) * 100)
        previous_close = row[0]
    return return_list
def verify_date_sync(symbol, months):
    """Return True when *symbol*'s recent record dates match the benchmark's."""
    #build db connection
    db_conn = sqlite3.connect(Config.get_path("history_db"))
    db_conn.isolation_level = None
    cursor = db_conn.cursor()
    pull_months = months + 1
    query = ("SELECT date FROM '%s' where adj_close != -9001 "
             "ORDER BY date DESC LIMIT %s;")

    def fetch_dates(table):
        # newest-first from SQL; reverse into chronological order
        rows = cursor.execute(query % (table, pull_months)).fetchall()
        return [row[0] for row in rows[::-1]]

    symbol_dates = fetch_dates(symbol)
    benchmark_dates = fetch_dates(get_benchmark())
    db_conn.close()
    return benchmark_dates == symbol_dates
def get_symbol(symbol, element, default_type=None, quickrun=None):
    """Return the requested info *element* (e.g. "Industry"/"Purpose") for *symbol*.

    Looks the symbol up in securities_info.csv, interactively filling in
    and persisting any "Unknown" Industry/Purpose values unless quickrun
    is True. Also triggers manual-beta verification for non-benchmark,
    non-T-Bond symbols. NOTE(review): default_type is currently unused.
    """
    #for securities_info.csv source
    column_list = [
        Csv.CsvColumn("Symbol", is_row_identifier=True),
        Csv.CsvColumn("Industry"),
        Csv.CsvColumn("Purpose")
    ]
    default_data = [[symbol, "Unknown", "Unknown"]]
    file_path = Config.get_path("securities_info")
    csv_file = Csv.CsvFile(file_path,
                           default_column_list=column_list,
                           default_data_rows=default_data,
                           read_only=True)
    security_row = csv_file.find_by_id(symbol)
    #prompt for security info if it's unknown
    if quickrun != True:
        write_new_data = False
        # pairs of (column name, index of that column in a default_data row)
        for info_element in [["Industry", 1], ["Purpose", 2]]:
            if security_row[info_element[0]] == "Unknown":
                write_new_data = True
                new_value = set_new_info(symbol, info_element[0], csv_file)
                default_data[0][info_element[1]] = new_value
                # reopen writable so the updated defaults are persisted
                # (presumably CsvFile writes defaults when not read_only
                # - TODO confirm; write_new_data itself is never read)
                csv_file = Csv.CsvFile(file_path,
                                       default_column_list=column_list,
                                       default_data_rows=default_data,
                                       read_only=False,
                                       stop_on_write=False)
                security_row = csv_file.find_by_id(symbol)
    if symbol != "T-Bond" and security_row[element] != "Benchmark":
        verify_manual_beta(symbol)
    return (security_row[element])
def load_portfolio_betas(positions):
    """Compute and attach beta values (via position.add_beta) for each position.

    For every non-risk-free position, calculates betas over sub-year
    intervals, whole-year spans up to the configured maximum, and the
    longest span the available history supports. Spans are capped by
    both benchmark and per-symbol record counts. Returns 0.
    """
    sub_year_interval_months = Config.get_beta("subyear_interval")
    max_beta_years = Config.get_beta("max_years")
    #build db connection
    db_conn = sqlite3.connect(Config.get_path("history_db"))
    db_conn.isolation_level = None
    cursor = db_conn.cursor()
    #make sure we've got enough benchmark records to do the full desired span
    benchmark_max = max_beta_years * 12
    benchmark_count = (cursor.execute(
        "SELECT COUNT(*) FROM '%s' where adj_close != -9001;" %
        (str(get_benchmark()))).fetchall()[0][0])
    #adjusting counts by 1 here to make sure we can go back a month before
    #the start of the beta period to get full return information
    if benchmark_count < (benchmark_max + 1):
        benchmark_max = (benchmark_count - 1)
        print(
            "Benchmark records insufficient for desired beta span. Setting maximum months to %s"
            % str(benchmark_max))
    for position in positions:
        print("Processing %s" % str(position.symbol))
        get_manual_average_beta(position)
        if not position.risk_free == True:
            #make sure we've got enough records for the symbol to match the benchmark/desired span
            symbol_max = benchmark_max
            symbol_count = (cursor.execute(
                "SELECT COUNT(*) FROM '%s' where adj_close != -9001;" %
                (str(position.symbol))).fetchall()[0][0])
            #adjusting counts by 1 as when checking benchmark
            if symbol_count < (symbol_max + 1):
                symbol_max = (symbol_count - 1)
                print(
                    "Symbol records insufficient for desired beta span. Setting maximum months to %s for symbol."
                    % symbol_max)
            # cap further by the *consecutive* month count, not just the total
            true_symbol_max = get_symbol_max(position.symbol)
            if true_symbol_max < (symbol_max + 1):
                symbol_max = (true_symbol_max - 1)
                print(
                    "Symbol records insufficient for desired beta span. Setting maximum months to %s for symbol."
                    % symbol_max)
            if symbol_max >= 2:
                #check here for our sub-year breakdown
                #we're assuming max_beta_years has been set to at least one
                #if not, you're just trying to break things
                for months in range(1, 12):
                    if months > symbol_max:
                        break
                    else:
                        if sub_year_interval_months:
                            # only at multiples of the configured interval
                            if not months % sub_year_interval_months:
                                position.add_beta(
                                    months,
                                    calc_single_security_beta(
                                        str(position.symbol), months))
                #switch to breakdown by year
                for year in range(Config.get_beta("min_years"),
                                  max_beta_years + 1):
                    months = year * 12
                    if months > symbol_max:
                        break
                    else:
                        position.add_beta(
                            months,
                            calc_single_security_beta(str(position.symbol),
                                                      months))
                #make sure to hit max-length beta if there weren't enough records
                #to go as long as we wanted on this position
                max_done = False
                #if we have less than a year of data for the symbol
                #we check the sub-year interval to make sure it's been done
                if symbol_max < 12:
                    if sub_year_interval_months:
                        if not symbol_max % sub_year_interval_months:
                            max_done = True
                #if it's at least a year, we check to see if it got
                #hit during the year-by-year breakdown
                else:
                    if not symbol_max % 12:
                        max_done = True
                if not max_done:
                    position.add_beta(
                        symbol_max,
                        calc_single_security_beta(str(position.symbol),
                                                  symbol_max))
            else:
                print("Insufficient history to calculate beta.")
    db_conn.close()
    return (0)
def print_xlsx(positions, filename_suffix=None):
    """Write the beta workbook: master sheet, per-industry/purpose sheets, overview."""
    filename = "Beta"
    if filename_suffix:
        filename += filename_suffix
    filename += ".xlsx"
    xl_wb_path = os.path.join(Config.get_path("reports_dir"), filename)
    xl_workbook = xlsxwriter.Workbook(xl_wb_path)
    xl_dir = os.path.split(xl_wb_path)[0]
    if not os.path.isdir(xl_dir):
        os.makedirs(xl_dir)
    # collect the distinct (title-cased) industries and purposes
    industries = []
    purposes = []
    for position in positions:
        for label, bucket in ((str(position.industry).strip().title(),
                               industries),
                              (str(position.purpose).strip().title(),
                               purposes)):
            if label not in bucket:
                bucket.append(label)
    industries.sort()
    purposes.sort()
    beta_sheets = [
        print_beta_sheet(positions, xl_workbook, "Beta - Master", None)
    ]
    #track our breakdown sheets and their total beta cells
    #for referencing in the overview sheet
    breakdown_beta_locations = []
    # distinct numeric ids keep the two category lists apart even when
    # their contents are identical (common with the default bond script)
    categories = [[industries, 0], [purposes, 1]]
    for entry_list, category_id in categories:
        for entry in entry_list:
            entry_positions = []
            for position in positions:
                if category_id == 0:
                    criteria = position.industry
                    sheet_name_base = "Industry"
                else:
                    criteria = position.purpose
                    sheet_name_base = "Purpose"
                sheet_name = sheet_name_base + " Beta - %s" % entry
                if criteria.title() == entry.title():
                    entry_positions.append(position)
            new_beta_sheet = print_beta_sheet(entry_positions, xl_workbook,
                                              sheet_name, beta_sheets[0])
            if new_beta_sheet:
                beta_sheets.append(new_beta_sheet)
    print_overview(xl_workbook, "Beta - Overview", beta_sheets)
    Excel.save_workbook(xl_workbook, xl_wb_path)
def clean():
    """Delete every file in the exchange-rates download directory."""
    print("Cleaning download dir")
    target_dir = Config.get_path("ex_rates_dir")
    for entry in os.listdir(target_dir):
        os.remove(os.path.join(target_dir, entry))