def main_menu(session, base_url):
    """Show the sample application's top-level menu and dispatch the choice.

    Loops until the user picks "Exit". Options: Market Quotes, Account List.

    :param session: authenticated session
    :param base_url: base URL for the API
    """
    menu_items = {"1": "Market Quotes", "2": "Account List", "3": "Exit"}
    while True:
        # Re-print the menu before every prompt.
        print("")
        for key, label in menu_items.items():
            print(key + ")\t" + label)
        choice = input("Please select an option: ")
        if choice == "1":
            Market(session, base_url).quotes()
        elif choice == "2":
            Accounts(session, base_url).account_list()
        elif choice == "3":
            break
        else:
            print("Unknown Option Selected!")
def oauth():
    """Allows user authorization for the sample application with OAuth 1"""
    etrade = OAuth1Service(
        name="etrade",
        consumer_key=config["DEFAULT"]["CONSUMER_KEY"],
        consumer_secret=config["DEFAULT"]["CONSUMER_SECRET"],
        request_token_url="https://api.etrade.com/oauth/request_token",
        access_token_url="https://api.etrade.com/oauth/access_token",
        authorize_url="https://us.etrade.com/e/t/etws/authorize?key={}&token={}",
        base_url="https://api.etrade.com")

    # Step 1: obtain an OAuth 1 request token/secret pair ("oob" = no
    # callback URL; the user relays a text code instead).
    token, token_secret = etrade.get_request_token(
        params={"oauth_callback": "oob", "format": "json"})

    # Step 2: send the user to E*TRADE to log in and approve; the page
    # displays a verification code they must type back in here.
    webbrowser.open(etrade.authorize_url.format(etrade.consumer_key, token))
    verifier = input(
        "Please accept agreement and enter text code from browser: ")

    # Step 3: exchange the authorized request token for an authenticated
    # OAuth 1 session.
    session = etrade.get_auth_session(
        token, token_secret, params={"oauth_verifier": verifier})

    base_url = "https://api.etrade.com"
    Accounts(session, base_url).account_list()
    def calculate_annualized_returns(self):
        """Compute the account's annualized return percentages.

        Returns a tuple ``(annualized, annualized_with_late)``, each a
        percentage rounded to 4 decimal places; the second includes
        principal from late notes.
        """
        money_in = 0
        avg_age_of_money = 0
        # Each row is (transaction_date, amount); withdrawals are presumably
        # stored as negative amounts, so money_in nets to capital in — confirm.
        monies = self.execute_select(
            "select transaction_date, amount from deposits_and_withdrawls;")
        for a in monies:
            money_in += a[1]
        # Dollar-weighted average age (in days) of the money in the account.
        # NOTE(review): raises ZeroDivisionError if the table is empty or the
        # amounts net to zero — confirm that cannot happen.
        for a in monies:
            days_old = (date.today() - a[0]).days
            proportion_of_total_money = a[1] / money_in
            avg_age_of_money += (proportion_of_total_money * days_old)
        accounts = Accounts(header=self.header).get_account_response()
        cash = accounts['available_cash_balance']
        pending = accounts['pending_investments_primary_market']
        notes = Notes()
        principal_with_late, principal = notes.get_account_value()
        # Current account value, excluding and including late-note principal.
        total_monies = principal + pending + cash
        total_monies_with_late = principal_with_late + pending + cash
        account_value_change = total_monies - float(money_in)
        account_value_percent_change = account_value_change / float(money_in)
        account_value_change_with_late = total_monies_with_late - float(
            money_in)
        account_value_percent_change_with_late = account_value_change_with_late / float(
            money_in)
        # NOTE(review): this raises the raw percent change to the power
        # (avg_age / 365). The conventional annualization is
        # (1 + r) ** (365 / avg_age) - 1 — confirm this formula is intended.
        annualized_returns = account_value_percent_change**float(
            (avg_age_of_money / 365))
        annualized_returns_with_late = account_value_percent_change_with_late**float(
            (avg_age_of_money / 365))
        return round(annualized_returns * 100,
                     4), round(annualized_returns_with_late * 100, 4)
    def update_deposits_and_withdrawls_table(self):
        """Record any new deposit/withdrawal reported by the Prosper API.

        Compares the most recent deposit and withdrawal rows stored locally
        in deposits_and_withdrawls against the account summary from Prosper
        and inserts a new row when either the amount or the date differs.
        Withdrawals are stored as negative amounts.
        """
        account = Accounts(self.header)
        account_response = account.get_account_response()
        # Latest locally-recorded deposit (amount > 0) and withdrawal (< 0).
        deposit_query = "select amount, transaction_date from deposits_and_withdrawls where id = (select max(id) from deposits_and_withdrawls where amount > 0);"
        withdrawl_query = "select amount, transaction_date from deposits_and_withdrawls where id = (select max(id) from deposits_and_withdrawls where amount < 0);"
        deposit_results = self.execute_select(deposit_query)
        withdrawl_results = self.execute_select(withdrawl_query)
        # Prosper dates arrive as strings; keep only the YYYY-MM-DD prefix.
        # NOTE(review): the [0][0] indexing below raises IndexError if either
        # query returns no rows — presumably the table is seeded; confirm.
        last_deposit_date_from_prosper = datetime.datetime.strptime(
            account_response['last_deposit_date'][0:10], "%Y-%m-%d").date()
        last_deposit_amount = deposit_results[0][0]
        last_deposit_date = deposit_results[0][1]
        last_withdrawl_date_from_prosper = datetime.datetime.strptime(
            account_response['last_withdraw_date'][0:10], "%Y-%m-%d").date()
        last_withdraw_amount = withdrawl_results[0][0]
        last_withdrawl_date = withdrawl_results[0][1]
        # A new deposit exists if the amount or date differs from our last row.
        if float(last_deposit_amount) != account_response[
                'last_deposit_amount'] or last_deposit_date != last_deposit_date_from_prosper:
            self.logger.info(
                "inserting new deposit record of {dep} on {date} ".format(
                    dep=account_response['last_deposit_amount'],
                    date=last_deposit_date_from_prosper))
            # NOTE(review): SQL is built with str.format rather than bound
            # parameters; values come from the Prosper API rather than user
            # input, but consider parameterizing anyway.
            self.execute_insert_or_update(
                "insert into deposits_and_withdrawls (transaction_date, amount, created_ts, modified_ts) values ('{date}', {amt}, '{time}' ,null)"
                .format(date=last_deposit_date_from_prosper,
                        amt=account_response['last_deposit_amount'],
                        time=datetime.datetime.today()))
        else:
            print("We good bro")
        # Same check for withdrawals; API amounts are negated to match local
        # storage. NOTE(review): unlike the deposit branch, the amount here is
        # not wrapped in float() before comparing, and the inserted date is
        # the raw string slice instead of the parsed date — confirm both are
        # intentional.
        if last_withdraw_amount != (
                account_response['last_withdraw_amount'] *
                -1) or last_withdrawl_date != last_withdrawl_date_from_prosper:
            self.logger.info(
                "inserting new withdrawl record of {dep} on {date} ".format(
                    dep=account_response['last_withdraw_amount'],
                    date=last_withdrawl_date_from_prosper))
            self.execute_insert_or_update(
                "insert into deposits_and_withdrawls (transaction_date, amount, created_ts, modified_ts) values ('{date}', {amt}, '{time}' ,null)"
                .format(date=account_response['last_withdraw_date'][0:10],
                        amt=account_response['last_withdraw_amount'] * -1,
                        time=datetime.datetime.today()))
        else:
            print("We good bro")
client_id=config['prosper']['client_id'],
    client_secret=config['prosper']['client_secret'],
    ps=config['prosper']['ps'],
    username=config['prosper']['username']).execute()
# NOTE(review): the opening of the call completed above (and its assignment
# target, presumably access_token) is outside this view — confirm.
header = utils.http_header_build(access_token)
order_header = utils.http_header_build_orders(access_token)
# Bid size can grow on a recurring schedule when enabled in config.
bid_amt = utils.get_bid_amount(
    starting_bid_amt=config['bid_size']['bid'],
    starting_bid_date=config['bid_size']['starting_bid_date'],
    implement_increasing_recurring_bid=config['bid_size']
    ['implement_increasing_recurring_bid'],
    increase_amt=config['bid_size']['weekly_increase_amt'])
account = Accounts(header)
# Whole dollars available to bid with.
cash_balance = math.floor(
    account.get_account_response()['available_cash_balance'])
# time.sleep(10)
# Typically takes ~ 20, 25 seconds to post, but there are outliers. Running a loop seems more effective at getting the most amount of listings vs searhcing for new listings and then running.
# Started on 11/23/20. Very first note found was expired... a 4K E rated note... So looks like i may not be able to speed up my proccess enough to get all notes...
# Still worthwhile to see if SearchAndDestroy gets better results.
# Turning off min_run_time on 12/16/2020 to see if expired due to demand decreases.
# 11/23/20 - 12/16/2020 was 5.63% expired due to demand (4 out of 71)
# Started no min_run_time and just let get throttled on 12/17/20 beg of day
# Turned min_run_time back on for 1/12/21. 12/17/20 - 1/11/21 was 8 / 81 was expired due to demand 9.8%...
# 1/12/21 - 1/26/21 had 4/61 expired due to demand 6.5%
# On 1/27/21 turned off all 5 HR filters to see if reduced expired finds.
# 1/27/21 - X had X / Y expired due to demand Z%. (Looking for sub 5% if not def turn back on HR, if so reevaluate
#TODO Is 5 HR filters worth having considering virtually no HR loans out there?
# Ad-hoc driver script for poking at the Prosper API during development.
# n = Notes()
# print(n.get_account_value())
config = default.config
access_token = TokenGeneration(
    client_id=config['prosper']['client_id'],
    client_secret=config['prosper']['client_secret'],
    ps=config['prosper']['ps'],
    username=config['prosper']['username']).execute()
# Bug fix: this assignment was commented out while `header` is still used by
# Accounts(header) below, which raised a NameError at runtime.
header = utils.http_header_build(access_token)
a = Accounts(header)
print(a.get_account_response())
# for i in range(10):
#     a = Accounts(header).get_account_response()
#     print(a.get_account_response())
#     print(a)
# tm = TrackingMetrics()
# order = tm.get_order_response_by_order_id("51c907be-8534-4d69-abe7-db664ef438dc")
# print(order.json())
# tm.update_deposits_and_withdrawls_table()
# ar = AnnualizedReturns(header)
# ar.update_annualized_returns_table()
# print(ar.calculate_annualized_returns())
    # NOTE(review): tail of an email-builder function whose `def` line is
    # outside this view.
    # Attach the defaults chart inline; the Content-ID lets the HTML body
    # reference it with cid:.
    fp1 = open(file_location_defaults, 'rb')
    img1 = MIMEImage(fp1.read())
    fp1.close()
    img1.add_header('Content-ID', '<{}>'.format(file_location_defaults))
    msg.attach(img1)
    # Attach the annualized-returns chart inline the same way.
    fp2 = open(file_location_annualized_returns, 'rb')
    img2 = MIMEImage(fp2.read())
    fp2.close()
    img2.add_header('Content-ID',
                    '<{}>'.format(file_location_annualized_returns))
    msg.attach(img2)
    # add avg daily outstanding yield chart
    return msg


# Script entry: regenerate the daily charts, then build and send the email.
accounts = Accounts(header)
listing = Listing(header=header)
# path_to_save = default.base_path + '/log/daily_metrics.png'
path_to_save_defaults = default.base_path + '/log/daily_defaults.png'
path_to_save_annualized_returns = default.base_path + '/log/daily_annualized_returns.png'
c = CreateDailyMetricsTable(
    start_date="2020-03-02",
    path_to_save_defaults=path_to_save_defaults,
    path_to_save_annualized_returns=path_to_save_annualized_returns)
# c.create_line_graph_metrics_png()
c.create_default_tracking_line_graph_png()
c.create_annualized_returns_line_graph()
# NOTE(review): the remaining arguments of this call continue past this view.
msg = create_email_message(
    from_address=default.config['email']['send_from_email'],
# TODO Find a safe way to pass creds access_token = TokenGeneration( client_id=config['prosper']['client_id'], client_secret=config['prosper']['client_secret'], ps=config['prosper']['ps'], username=config['prosper']['username']).execute() header = utils.http_header_build(access_token) listings = Listing(header) bid_amt = utils.get_bid_amount( starting_bid_amt=config['bid_size']['bid'], starting_bid_date=config['bid_size']['starting_bid_date'], implement_increasing_recurring_bid=config['bid_size'] ['implement_increasing_recurring_bid'], increase_amt=config['bid_size']['weekly_increase_amt']) account = Accounts(header) cash_balance = account.get_account_response()['available_cash_balance'] #searches for new listings so program can run once new listings are posted # listings_posted = listings.search_for_new_listings(time_to_search=240) # checks to see if new listings are posted # I turned this off and just looped through program on 11/3/20, to see if it helps lower EXPIRED number at all. # EXPIRED NUMBERS: 18.3% last 30 days, 22.6% last 60 days, 19.8% all time. #TODO Make how long to run a command line param time_to_continuously_run_submit_orders = time.time() + 330 sleep = 0.1 runs = 0 #TODO Possible low hanging fruit to reduce "EXPIRED" listings, just loop through and run program instead of checking for new listings... # Run at 10 seconds after the hour, loop through and implement a sleep that's just enough to not get throttled.. # Update, just looping through and accepting getting throttled just run as fast as possible to avoid expire...