def reconcile_new(config, args):
    """Rebuild reconciliation clusters from stored trackings and upload them.

    Pulls every reconcilable tracking from local storage, clusters it,
    attaches billed costs from orders, overlays manual PO entries from the
    sheet, merges in per-tracking cost data from the group sites, and finally
    pushes the finished clusters to the reconciliation spreadsheet.
    """
    uploader = ReconciliationUploader(config)
    output = TrackingOutput(config)
    eligible = [t for t in output.get_existing_trackings() if t.reconcile]

    # Clustering always starts from an empty slate and is rebuilt from the
    # locally stored trackings.
    all_clusters = []
    clusters.update_clusters(all_clusters, eligible)

    retriever = OrderInfoRetriever(config)
    fill_billed_costs('Fetching order costs', all_clusters, retriever, True)
    all_clusters = clusters.merge_orders(all_clusters)
    fill_billed_costs('Filling merged order costs', all_clusters, retriever,
                      False)

    # Manual PO entries (and only manual ones) come from the sheet itself.
    uploader.override_pos_and_costs(all_clusters)

    site_manager = GroupSiteManager(config, DriverCreator())
    tracking_costs, po_costs = get_new_tracking_pos_costs_maps(
        config, site_manager, args)
    by_tracking = map_clusters_by_tracking(all_clusters)
    merge_by_trackings_tuples(by_tracking, tracking_costs, all_clusters)
    fill_costs_new(by_tracking, tracking_costs, po_costs, args)
    fill_cancellations(all_clusters, config)
    uploader.download_upload_clusters_new(all_clusters)
def get_tracking_info_logged_in(
        self, amazon_url: str,
        to_email: str) -> List[Tuple[str, Optional[str]]]:
    """Scrape tracking info from an Amazon order page using a logged-in profile.

    Finds a browser profile directory whose name contains the local part of
    *to_email*, opens *amazon_url* with a non-headless driver built from that
    profile, and collects (tracking, status) tuples from each shipment section.

    Returns an empty list if no matching profile directory exists.
    The driver is always quit, even if scraping raises.
    """
    email_user = to_email.split("@")[0].lower()
    # Hoisted: expanduser() is loop-invariant and was previously recomputed
    # on every iteration and again when building the profile path.
    profile_root = os.path.expanduser(self.config["profileBase"])
    driver = None
    for profile_name in os.listdir(profile_root):
        if email_user in profile_name.lower():
            dc = DriverCreator()
            dc.args.no_headless = True
            driver = dc.new(f"{profile_root}/{profile_name}")
            break
    if driver is None:
        tqdm.write(
            f"Couldn't find profile directory for email: {to_email}")
        return []
    try:
        driver.get(amazon_url)
        shipment_eles = driver.find_elements_by_css_selector(
            "div.a-section-expander-container")
        if not shipment_eles:
            # Single-shipment page: the delivery status lives in the
            # left-hand column rather than in an expander section.
            return self.get_trackings_within_shipment(
                driver,
                driver.find_element_by_css_selector(
                    "div.a-col-left div.a-color-offset-background span.a-color-base"
                ).text.strip())
        trackings = []
        for shipment_ele in shipment_eles:
            delivery_status = shipment_ele.find_element_by_css_selector(
                "span.a-color-base").text.strip()
            # Expand the section so its tracking details are present in the DOM.
            shipment_ele.click()
            trackings.extend(
                self.get_trackings_within_shipment(shipment_ele,
                                                   delivery_status))
        return trackings
    finally:
        driver.quit()
def reconcile_new(config, args):
    """Rebuild reconciliation clusters from stored trackings and upload them.

    Like the sibling version, but also uploads unknown trackings to the base
    reconciliation spreadsheet (unless --groups was passed).
    """
    reconciliation_uploader = ReconciliationUploader(config)
    tracking_output = TrackingOutput(config)
    trackings = tracking_output.get_existing_trackings()
    reconcilable_trackings = [t for t in trackings if t.reconcile]
    # start from scratch
    all_clusters = []
    clusters.update_clusters(all_clusters, reconcilable_trackings)
    order_info_retriever = OrderInfoRetriever(config)
    fill_billed_costs('Fetching order costs', all_clusters,
                      order_info_retriever, True)
    all_clusters = clusters.merge_orders(all_clusters)
    fill_billed_costs('Filling merged order costs', all_clusters,
                      order_info_retriever, False)
    # add manual PO entries (and only manual ones)
    reconciliation_uploader.override_pos_and_costs(all_clusters)
    driver_creator = DriverCreator()
    group_site_manager = GroupSiteManager(config, driver_creator)
    trackings_to_info, po_to_cost = get_new_tracking_pos_costs_maps(
        config, group_site_manager, args)
    clusters_by_tracking = map_clusters_by_tracking(all_clusters)
    merge_by_trackings_tuples(clusters_by_tracking, trackings_to_info,
                              all_clusters)
    fill_costs_new(clusters_by_tracking, trackings_to_info, po_to_cost, args)
    fill_cancellations(all_clusters, config)
    # NOTE(review): `et` does not appear to be defined anywhere visible and
    # looks like a garbled/truncated identifier — confirm the intended call.
    et(config, all_clusters)
    sheet_id = config['reconciliation']['baseSpreadsheetId']
    if args.groups:
        print("Skipping unknown-tracking upload due to the --groups argument")
    else:
        upload_unknown_trackings(sheet_id, set(clusters_by_tracking.keys()),
                                 trackings_to_info)
    reconciliation_uploader.download_upload_clusters_new(all_clusters)
def new_driver(profile_base: str, profile_name: str) -> WebDriver:
    """Create a non-headless webdriver backed by the given browser profile."""
    creator = DriverCreator()
    creator.args.no_headless = True
    profile_path = f"{os.path.expanduser(profile_base)}/{profile_name}"
    return creator.new(profile_path)
def main():
    """Entry point: pull tracking numbers from email and distribute them.

    Retrieves Amazon and Best Buy trackings from email, determines which are
    new relative to local storage, emails the new ones, uploads everything to
    the group site(s), adds new reconcilable trackings to Google Sheets, and
    saves the new trackings locally. On failure after emails have been read,
    it backs out the email state (presumably marking emails unread — the log
    messages suggest so; TODO confirm against back_out_of_all).
    """
    parser = argparse.ArgumentParser(description='Get tracking #s script')
    parser.add_argument("--seen", action="store_true")
    parser.add_argument("--days")
    args, _ = parser.parse_known_args()
    driver_creator = DriverCreator()
    config = open_config()
    email_config = config['email']
    email_sender = EmailSender(email_config)
    print("Retrieving Amazon tracking numbers from email ...")
    amazon_tracking_retriever = AmazonTrackingRetriever(
        config, args, driver_creator)
    # NOTE(review): bare except is intentionally broad here — it emails an
    # error report and re-raises, so nothing is swallowed.
    try:
        trackings = amazon_tracking_retriever.get_trackings()
    except:
        send_error_email(email_sender, "Error retrieving Amazon emails")
        raise
    print("Retrieving Best Buy tracking numbers from email ...")
    bestbuy_tracking_retriever = BestBuyTrackingRetriever(
        config, args, driver_creator)
    try:
        trackings.update(bestbuy_tracking_retriever.get_trackings())
    except:
        send_error_email(email_sender, "Error retrieving Best Buy emails")
        raise
    # Everything below runs after emails have been read; the outer handler
    # at the bottom resets email state if any of it fails.
    try:
        tracking_output = TrackingOutput(config)
        # Trackings we already know about from previous runs.
        existing_tracking_nos = set([
            t.tracking_number for t in tracking_output.get_existing_trackings()
        ])
        new_tracking_nos = set(
            trackings.keys()).difference(existing_tracking_nos)
        print(f"Found {len(new_tracking_nos)} new tracking numbers "
              f"(out of {len(trackings)} total) from emails.")
        new_trackings = [trackings[n] for n in new_tracking_nos]
        # We only need to process new tracking numbers if there are any;
        # otherwise skip straight to processing existing locally stored data.
        if new_trackings:
            try:
                email_sender.send_email(new_trackings)
            except Exception as e:
                # When running --seen, we're often processing a very large number of emails that can
                # take a long time, and the Tracking Numbers email isn't too important to us (but the
                # upload to portals/Sheets definitely is). So don't fail after we've been running for
                # a long time just on account of a failed email.
                if args.seen:
                    print(
                        f"Email sending failed with error: {str(e)}\n{util.get_traceback_lines()}"
                    )
                    print("New trackings are:\n" +
                          "\n".join([str(nt) for nt in new_trackings]))
                    print(
                        "Continuing to portal/Sheet upload because email sending is non-essential."
                    )
                else:
                    raise e
        print("Uploading all tracking numbers...")
        group_site_manager = GroupSiteManager(config, driver_creator)
        try:
            group_site_manager.upload(trackings.values())
        except:
            send_error_email(email_sender, "Error uploading tracking numbers")
            # Under --seen, an upload failure is tolerated so the remaining
            # steps still run; otherwise it aborts.
            if args.seen:
                print("Error uploading tracking numbers; skipping.")
            else:
                raise
        reconcilable_trackings = [t for t in new_trackings if t.reconcile]
        # Also only add new trackings to the sheet
        print("Adding results to Google Sheets")
        tracking_uploader = TrackingUploader(config)
        try:
            tracking_uploader.upload_trackings(reconcilable_trackings)
        except:
            send_error_email(email_sender, "Error uploading to Google Sheets")
            if args.seen:
                print("Error uploading to Google Sheets; skipping.")
            else:
                raise
        print("Writing results to file")
        try:
            tracking_output.save_trackings(new_trackings)
        except:
            send_error_email(email_sender, "Error writing output file")
            raise
        print("Done")
    except:
        # Reset so the next run re-processes the same emails. The back-out
        # calls run unconditionally; the log lines only apply when emails
        # were actually marked read (i.e. not --seen).
        print("Exception thrown after looking at the emails.")
        if not args.seen:
            print("Marking all relevant emails as unread to reset.")
        amazon_tracking_retriever.back_out_of_all()
        bestbuy_tracking_retriever.back_out_of_all()
        if not args.seen:
            print("Marked all as unread.")
        raise
def main():
    """Import Amazon tracking reports from CSV files (globs) or a Google Sheet.

    Filters out blank/N-A and non-reconcilable rows, dedupes, uploads the
    result to Sheets and local storage, then emails and uploads only the
    trackings that are new to us.
    """
    arg_parser = argparse.ArgumentParser(
        description='Importing Amazon reports from CSV or Drive')
    arg_parser.add_argument("--personal",
                            "-p",
                            action="store_true",
                            help="Use the personal CSV format")
    arg_parser.add_argument("globs", nargs="*")
    args, _ = arg_parser.parse_known_args()
    row_parser = from_personal_row if args.personal else from_amazon_row

    imported = []
    if args.globs:
        # Read every file matched by every glob given on the command line.
        for pattern in args.globs:
            for path in glob.glob(pattern):
                imported.extend(read_trackings_from_file(path, row_parser))
    else:
        # No files given: pull rows from a Google Sheet instead.
        sheet_id = get_required("Enter Google Sheet ID: ")
        tab_name = get_required("Enter the name of the tab within the sheet: ")
        imported.extend(ObjectsToSheet().download_from_sheet(
            from_amazon_row, sheet_id, tab_name))

    if not imported:
        print("Nothing to import; terminating.")
        return

    # Drop rows with missing/placeholder tracking numbers, reporting counts.
    n_a_count = sum(1 for t in imported if t and t.tracking_number == 'N/A')
    empty_count = sum(1 for t in imported if t and t.tracking_number == '')
    print(f'Skipping {n_a_count} for N/A tracking column and '
          f'{empty_count} for empty tracking column.')
    imported = [
        t for t in imported
        if t and t.tracking_number != 'N/A' and t.tracking_number != ''
    ]

    non_reconcilable = sum(1 for t in imported if not t.reconcile)
    print(f'Skipping {non_reconcilable} non-reconcilable trackings.')
    imported = [t for t in imported if t.reconcile]

    pre_dedupe_count = len(imported)
    imported = dedupe_trackings(imported)
    print(f'Filtered {pre_dedupe_count - len(imported)} duplicate trackings from the sheet.'
          )

    print('Uploading trackings to Sheets...')
    TrackingUploader(config).upload_trackings(imported)

    tracking_output = TrackingOutput(config)
    # Snapshot known tracking numbers before saving so we can diff afterward.
    before_save = {
        t.tracking_number
        for t in tracking_output.get_existing_trackings()
    }
    print(f"Number of trackings before: {len(before_save)}.")
    print(f"Number imported from report(s): {len(imported)}.")
    tracking_output.save_trackings(imported)
    after_save = {
        t.tracking_number: t
        for t in tracking_output.get_existing_trackings()
    }
    print(f"Number of trackings after: {len(after_save)}.")

    new_numbers = set(after_save.keys()).difference(before_save)
    print(f"Number of new-to-us trackings: {len(new_numbers)}")
    new_objects = [after_save[n] for n in new_numbers]

    # Only brand-new trackings get emailed and uploaded to the group site(s).
    EmailSender(config['email']).send_email(new_objects)
    print("Uploading new trackings to the group(s)' site(s)...")
    GroupSiteManager(config, DriverCreator()).upload(new_objects)
def main():
    """Import Amazon tracking reports from CSV files (globs) or a Google Sheet.

    Filters out blank/N-A and non-reconcilable rows, dedupes, then uploads
    everything to Sheets, local storage, and the group site(s).
    """
    arg_parser = argparse.ArgumentParser(
        description='Importing Amazon reports from CSV or Drive')
    arg_parser.add_argument("globs", nargs="*")
    args, _ = arg_parser.parse_known_args()

    collected = []
    if args.globs:
        # Read every file matched by every glob given on the command line.
        for pattern in args.globs:
            for path in glob.glob(pattern):
                collected.extend(read_trackings_from_file(path))
    else:
        # No files given: pull rows from a Google Sheet instead.
        sheet_id = get_required("Enter Google Sheet ID: ")
        tab_name = get_required("Enter the name of the tab within the sheet: ")
        collected.extend(ObjectsToSheet().download_from_sheet(
            from_amazon_row, sheet_id, tab_name))

    # Drop rows with missing/placeholder tracking numbers, reporting counts.
    n_a_count = sum(1 for t in collected if t and t.tracking_number == 'N/A')
    empty_count = sum(1 for t in collected if t and t.tracking_number == '')
    print(
        f'Skipping {n_a_count} for n/a tracking column and {empty_count} for empty tracking column'
    )
    collected = [
        t for t in collected
        if t and t.tracking_number != 'N/A' and t.tracking_number != ''
    ]

    non_reconcilable = sum(1 for t in collected if not t.reconcile)
    print(f'Skipping {non_reconcilable} non-reconcilable trackings')
    collected = [t for t in collected if t.reconcile]

    pre_dedupe_count = len(collected)
    collected = dedupe_trackings(collected)
    print(
        f'Filtered {pre_dedupe_count - len(collected)} duplicate trackings from the sheet'
    )

    print('Uploading trackings to Sheets...')
    TrackingUploader(config).upload_trackings(collected)

    tracking_output = TrackingOutput(config)
    before = len(tracking_output.get_existing_trackings())
    print(f"Number of trackings beforehand: {before}")
    print(f"Number from sheet: {len(collected)}")
    tracking_output.save_trackings(collected)
    after = len(tracking_output.get_existing_trackings())
    print(f"Number of trackings after: {after}")

    print("Uploading to the group(s)' site(s)...")
    GroupSiteManager(config, DriverCreator()).upload(collected)