Example #1
def main(*args):
    if set(args).intersection(('-h', '--help', 'help')) or len(args) > 2:
        print_help()
        raise SystemExit(0)

    if set(args).intersection(('--version', 'version')):
        print('ArchiveBox version {}'.format(__VERSION__))
        raise SystemExit(0)

    ### Handle CLI arguments
    #     ./archive bookmarks.html
    #     ./archive 1523422111.234
    import_path, resume = None, None
    if len(args) == 2:
        # if the argument is a string, it's an import_path file to import
        # if it's a number, it's a timestamp to resume archiving from
        if args[1].replace('.', '').isdigit():
            import_path, resume = None, args[1]
        else:
            import_path, resume = args[1], None

    ### Set up output folder
    if not os.path.exists(OUTPUT_DIR):
        os.makedirs(OUTPUT_DIR)

    ### Handle ingesting urls piped in through stdin
    # (e.g. if the user does: cat example_urls.txt | ./archive)
    if not sys.stdin.isatty():
        stdin_raw_text = sys.stdin.read()
        if stdin_raw_text and import_path:
            print('[X] You should pass either a path as an argument, '
                  'or pass a list of links via stdin, but not both.\n')
            print_help()
            raise SystemExit(1)
        if stdin_raw_text:
            import_path = save_stdin_source(stdin_raw_text)

    ### Handle ingesting urls from a remote file/feed
    # (e.g. if an RSS feed URL is used as the import path)
    if import_path and any(
            import_path.startswith(s)
            for s in ('http://', 'https://', 'ftp://')):
        import_path = save_remote_source(import_path)

    ### Run the main archive update process
    update_archive_data(import_path=import_path, resume=resume)
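The dispatch between a resume timestamp and an import path in Example #1 rests entirely on args[1].replace('.', '').isdigit(). A minimal standalone sketch of that check (the helper name parse_cli_arg is hypothetical, not part of ArchiveBox):

def parse_cli_arg(arg):
    # Mirror Example #1's dispatch: an all-digit argument (optionally with a
    # dot) is treated as a resume timestamp, anything else as an import path.
    if arg.replace('.', '').isdigit():
        return None, arg        # import_path=None, resume=<timestamp>
    return arg, None            # import_path=<file or URL>, resume=None

# parse_cli_arg('1523422111.234')  -> (None, '1523422111.234')
# parse_cli_arg('bookmarks.html')  -> ('bookmarks.html', None)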
Example #2
def y_main(url):
    """The main ArchiveBox entry point. Everything starts here."""

    ### Handle CLI arguments
    #     ./archive bookmarks.html
    #     ./archive 1523422111.234
    import_path, resume = None, None  # import_path is filled in below if url points at a remote source
    ### Set up output folder
    if not os.path.exists(OUTPUT_DIR):
        os.makedirs(OUTPUT_DIR)

    ### Handle ingesting urls from a remote file/feed
    # (e.g. if an RSS feed URL is used as the import path)
    if url and any(
            url.startswith(s) for s in ('http://', 'https://', 'ftp://')):
        import_path = save_remote_source(url)

    ### Run the main archive update process
    """The main ArchiveBox entrancepoint. Everything starts here."""

    links = [{
        'url': url,
        'timestamp': str(datetime.now().timestamp()),
        'title': None,
        'tags': '',
        'sources': [import_path]
    }]
    log_archiving_started(len(links), resume)
    idx, link = 0, None  # defaults in case the loop below never runs
    try:
        for idx, link in enumerate(links_after_timestamp(links, resume)):
            link_dir = os.path.join(ARCHIVE_DIR, link['timestamp'])
            archive_link(link_dir, link)

    except KeyboardInterrupt:
        log_archiving_paused(len(links), idx, link and link['timestamp'])
        raise SystemExit(0)

    except:
        print()
        raise

    log_archiving_finished(len(links))

    # Step 4: Re-write links index with updated titles, icons, and resources
    all_links, _ = load_links_index(out_dir=OUTPUT_DIR)
    write_links_index(out_dir=OUTPUT_DIR, links=all_links, finished=True)
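Example #2 leans on links_after_timestamp to honour the resume argument. ArchiveBox ships its own implementation; the rough idea, shown here only as a hedged sketch and not the project's exact code, is to skip every link whose timestamp precedes the resume point:

def links_after_timestamp(links, resume=None):
    # Sketch only: yield each link whose timestamp is at or after the resume
    # point; with no resume value, yield everything unchanged.
    if not resume:
        yield from links
        return
    for link in links:
        try:
            if float(link['timestamp']) >= float(resume):
                yield link
        except (ValueError, TypeError):
            # Non-numeric timestamps can't be compared, so pass them through.
            yield link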
Example #3
        source, resume = sys.argv[1].strip(), sys.argv[2]
    else:
        print_help()
        raise SystemExit(1)

    # See if archive folder already exists
    for out_dir in (OUTPUT_DIR, 'bookmarks', 'pocket', 'pinboard', 'html'):
        if os.path.exists(out_dir):
            break
    else:
        out_dir = OUTPUT_DIR

    # Step 0: Download url to local file (only happens if a URL is specified instead of local path)
    if source and any(
            source.startswith(s) for s in ('http://', 'https://', 'ftp://')):
        source = save_remote_source(source)
    elif stdin_raw_text:
        source = save_stdin_source(stdin_raw_text)

    # Step 1: Parse the links and dedupe them with existing archive
    all_links, new_links = load_links(archive_path=out_dir, import_path=source)

    # Step 2: Write new index
    write_links_index(out_dir=out_dir, links=all_links)

    # Step 3: Run the archive methods for each link
    if ONLY_NEW:
        update_archive(out_dir,
                       new_links,
                       source=source,
                       resume=resume,
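Example #3's archive-folder detection relies on Python's for/else: the else clause runs only when the loop completes without hitting break. A self-contained illustration with made-up folder names (the real candidates come from the example above):

import os

candidates = ('output', 'bookmarks', 'pocket', 'pinboard', 'html')
for out_dir in candidates:
    if os.path.exists(out_dir):
        break             # keep the first folder that already exists
else:
    out_dir = 'output'    # runs only if the loop never hit `break`

print('archiving into', out_dir)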