def get_categories(self):
    """Fetch Mint categories and return a name -> id map.

    Returns:
        dict: {category_name (str): category_id} for every Mint category
        reported by the mintapi client.
    """
    # Create a map of Mint category name to category id.
    logger.info('Creating Mint Category Map.')
    mint_api = self.get_mintapi()
    asyncSpin = AsyncProgress(Spinner('Fetching Categories '))
    # Dict comprehension instead of dict([...]): same result, no throwaway
    # intermediate list (flake8-comprehensions C404).
    categories = {
        cat_dict['name']: cat_id
        for (cat_id, cat_dict) in mint_api.get_categories().items()}
    asyncSpin.finish()
    return categories
def get_transactions(self, start_date):
    """Fetch all Mint transactions on or after *start_date*.

    Args:
        start_date: a date/datetime; rendered as MM/DD/YY for the Mint API.

    Returns:
        The transaction list from mintapi's get_transactions_json, with
        investment transactions excluded and duplicates skipped.
    """
    formatted_start = start_date.strftime('%m/%d/%y')
    api = self.get_mintapi()
    logger.info('Get all Mint transactions since {}.'.format(
        formatted_start))
    spinner = AsyncProgress(Spinner('Fetching Transactions '))
    results = api.get_transactions_json(
        start_date=formatted_start,
        include_investment=False,
        skip_duplicates=True)
    spinner.finish()
    return results
def dump_trans_and_categories(trans, cats, pickle_epoch, pickle_base_path):
    """Back up Mint transactions and categories to local pickle files.

    Args:
        trans: transactions object to pickle.
        cats: categories object to pickle.
        pickle_epoch: epoch identifier embedded in the pickle filenames.
        pickle_base_path: directory the pickle files are written into
            (created if missing).
    """
    label = 'Backing up Mint to local pickle file, epoch: {} '.format(
        pickle_epoch)
    asyncSpin = AsyncProgress(Spinner(label))
    # exist_ok=True avoids the check-then-create race of the old
    # os.path.exists() + os.makedirs() pair.
    os.makedirs(pickle_base_path, exist_ok=True)
    trans_pickle_path = os.path.join(
        pickle_base_path, MINT_TRANS_PICKLE_FMT.format(pickle_epoch))
    cats_pickle_path = os.path.join(
        pickle_base_path, MINT_CATS_PICKLE_FMT.format(pickle_epoch))
    with open(trans_pickle_path, 'wb') as f:
        pickle.dump(trans, f)
    with open(cats_pickle_path, 'wb') as f:
        pickle.dump(cats, f)
    asyncSpin.finish()
def get_trans_and_categories_from_pickle(pickle_epoch, pickle_base_path):
    """Load a previously pickled Mint transactions/categories backup.

    Args:
        pickle_epoch: epoch identifier embedded in the pickle filenames.
        pickle_base_path: directory containing the pickle files.

    Returns:
        (trans, cats) tuple, as originally dumped.
    """
    spinner = AsyncProgress(Spinner(
        'Un-pickling Mint transactions from epoch: {} '.format(
            pickle_epoch)))
    trans_path = os.path.join(
        pickle_base_path, MINT_TRANS_PICKLE_FMT.format(pickle_epoch))
    cats_path = os.path.join(
        pickle_base_path, MINT_CATS_PICKLE_FMT.format(pickle_epoch))
    with open(trans_path, 'rb') as trans_file:
        trans = pickle.load(trans_file)
    with open(cats_path, 'rb') as cats_file:
        cats = pickle.load(cats_file)
    spinner.finish()
    return trans, cats
def fetch_order_history(report_download_path, start_date, end_date,
                        email=None, password=None, session_path=None,
                        headless=False):
    """Fetch Amazon Items/Orders/Refunds CSV reports for a date range.

    Skips any report already present in report_download_path; otherwise
    drives a Selenium session to request, wait for, and download each one.

    Args:
        report_download_path: directory where the report CSVs live/land.
        start_date, end_date: date range for the reports.
        email, password: Amazon credentials; email may be resolved by
            get_email.
        session_path: optional persisted browser session location.
        headless: run the browser headless when True.

    Returns:
        Tuple of three open text file handles: (items, orders, refunds).
        Caller is responsible for closing them.

    Exits the process (exit(1)) if a report never becomes ready within
    ORDER_HISTORY_PROCESS_TIMEOUT_S.
    """
    email = get_email(email)
    name = email.split('@')[0]

    report_shortnames = ['Items', 'Orders', 'Refunds']
    report_names = [
        '{} {} from {:%d %b %Y} to {:%d %b %Y}'.format(
            name, t, start_date, end_date)
        for t in report_shortnames]
    # Note: 'Orders' maps to Amazon's 'SHIPMENTS' report type.
    report_types = ['ITEMS', 'SHIPMENTS', 'REFUNDS']
    report_paths = [
        os.path.join(report_download_path, name + '.csv')
        for name in report_names]

    # exist_ok=True avoids the check-then-create race of the old
    # os.path.exists() + os.makedirs() pair.
    os.makedirs(report_download_path, exist_ok=True)

    # Be lazy with getting the driver, as if no fetching is needed, then it's
    # all good.
    driver = None
    for report_shortname, report_type, report_name, report_path in zip(
            report_shortnames, report_types, report_names, report_paths):
        if os.path.exists(report_path):
            # Report has already been fetched! Woot
            continue

        # Report is not here. Go get it
        if not driver:
            driver = get_amzn_driver(email, password,
                                     headless=headless,
                                     session_path=session_path)

        requestSpin = AsyncProgress(Spinner(
            'Requesting {} report '.format(report_shortname)))
        request_report(driver, report_name, report_type,
                       start_date, end_date)
        requestSpin.finish()

        processingSpin = AsyncProgress(Spinner(
            'Waiting for {} report to be ready '.format(report_shortname)))
        try:
            wait_cond = EC.presence_of_element_located(
                (By.XPATH, get_report_download_link_xpath(report_name)))
            WebDriverWait(driver, ORDER_HISTORY_PROCESS_TIMEOUT_S).until(
                wait_cond)
            processingSpin.finish()
        except TimeoutException:
            processingSpin.finish()
            logger.critical("Cannot find download link after a minute!")
            exit(1)

        downloadSpin = AsyncProgress(Spinner(
            'Downloading {} report '.format(report_shortname)))
        download_report(driver, report_name, report_path)
        downloadSpin.finish()

    logger.info('\nAll Amazon history has been fetched. Onto tagging.')
    if driver:
        # quit() (not close()) ends the whole WebDriver session; close()
        # only closes the current window and can leave the browser
        # process running.
        driver.quit()

    return (
        open(report_paths[0], 'r', encoding='utf-8'),
        open(report_paths[1], 'r', encoding='utf-8'),
        open(report_paths[2], 'r', encoding='utf-8'))