def update_archive_data(import_path=None, resume=None): """The main ArchiveBox entrancepoint. Everything starts here.""" check_dependencies() # Step 1: Load list of links from the existing index # merge in and dedupe new links from import_path all_links, new_links = load_links_index(out_dir=OUTPUT_DIR, import_path=import_path) # Step 2: Write updated index with deduped old and new links back to disk write_links_index(out_dir=OUTPUT_DIR, links=all_links) # Step 3: Run the archive methods for each link links = new_links if ONLY_NEW else all_links log_archiving_started(len(links), resume) idx, link = 0, 0 try: for idx, link in enumerate(links_after_timestamp(links, resume)): link_dir = os.path.join(ARCHIVE_DIR, link['timestamp']) archive_link(link_dir, link) except KeyboardInterrupt: log_archiving_paused(len(links), idx, link and link['timestamp']) raise SystemExit(0) except: print() raise log_archiving_finished(len(links)) # Step 4: Re-write links index with updated titles, icons, and resources all_links, _ = load_links_index(out_dir=OUTPUT_DIR) write_links_index(out_dir=OUTPUT_DIR, links=all_links, finished=True)
def update_archive_data(import_path=None, resume=None): """Main entrance point """ # step 1: load list of links form existing index # merge in and dedupe new links from import path all_links, new_links = load_links_index(out_dir=OUTPUT_DIR, import_path=import_path) # step 2: write updated index with deduped old and new links back to disk write_links_index(out_dir=OUTPUT_DIR, links=all_links) # step 3: run the archive methods for each link links = new_links if ONLY_NEW else all_links log_archiving_started(len(links), resume) # important note here - iterate through (if keyboard interrupt print out the stop) idx, link = 0, 0 try: for idx, link in enumerate(links_after_timestamp(links, resume)): links_dir = os.path.join(ARCHIVE_DIR, link['timestamp']) archive_link(link_dir, link) except KeyboardInterrupt: log_archiving_paused(len(links), idx, link and link['timestamp']) raise SystemExit(0) except: print() raise log_archiving_finished(len(links)) # Step 4: Re-write links index with updated titles, icons, and resources all_links, _ = load_links_index(out_dir=OUTPUT_DIR) write_links_index(out_dir=OUTPUT_DIR, links=all_links, finished=True)
def main(url):
    """Handle a single CLI argument and run the archive process for that URL."""
    ### Handle CLI arguments
    #     ./archive bookmarks.html
    #     ./archive 1523422111.234
    import_path, resume = None, None

    ### Set up output folder
    if not os.path.exists(OUTPUT_DIR):
        os.makedirs(OUTPUT_DIR)

    ### Handle ingesting urls from a remote file/feed
    # (e.g. if an RSS feed URL is used as the import path)
    if url and any(url.startswith(s) for s in ('http://', 'https://', 'ftp://')):
        import_path = save_remote_source(url)

    ### Run the main archive update process
    links = [{
        'url': url,
        'timestamp': str(datetime.now().timestamp()),
        'title': None,
        'tags': '',
        'sources': [import_path],
    }]

    log_archiving_started(len(links), resume)
    idx, link = 0, 0
    try:
        for idx, link in enumerate(links_after_timestamp(links, resume)):
            link_dir = os.path.join(ARCHIVE_DIR, link['timestamp'])
            archive_link(link_dir, link)

    except KeyboardInterrupt:
        log_archiving_paused(len(links), idx, link and link['timestamp'])
        raise SystemExit(0)

    except:
        print()
        raise

    log_archiving_finished(len(links))

    # Re-write the links index with updated titles, icons, and resources
    all_links, _ = load_links_index(out_dir=OUTPUT_DIR)
    write_links_index(out_dir=OUTPUT_DIR, links=all_links, finished=True)
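# Hypothetical entrypoint wiring (an assumption, not confirmed by the source):
# pass the first CLI argument through to main().
# Usage:
#   ./archive https://example.com/some/page
if __name__ == '__main__':
    import sys

    if len(sys.argv) != 2:
        print('Usage: ./archive <url>')
        raise SystemExit(1)

    main(sys.argv[1])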