def merge_links(archive_path=OUTPUT_DIR, import_path=None, only_new=False):
    """Get new links from an import file and optionally merge them into the existing archive index.

    :param archive_path: directory containing an existing JSON link index to merge into
    :param import_path: optional file of new links to parse and validate
    :param only_new: if True, return only the links not already present in the archive
    :returns: list of validated link dicts (all links, or just the new ones when only_new)
    """
    all_links = []
    # parser_name must be pre-bound: the progress print below references it even
    # when no import file was given (previously a NameError in that case)
    parser_name = None
    if import_path:
        # parse and validate the import file
        raw_links, parser_name = parse_links(import_path)
        all_links = validate_links(raw_links)

    # merge existing links in archive_path and new links
    existing_links = []
    if archive_path:
        existing_links = parse_json_links_index(archive_path)
        all_links = validate_links(existing_links + all_links)

    num_new_links = len(all_links) - len(existing_links)
    if SHOW_PROGRESS:
        print()
    print(' > Adding {} new links to index from {} (parsed as {} format)'.format(
        num_new_links,
        pretty_path(import_path),
        parser_name,
    ))

    if only_new:
        return new_links(all_links, existing_links)

    return all_links
def merge_links(archive_path=OUTPUT_DIR, import_path=None, only_new=False):
    """Get new links from an import file and optionally merge them into the existing archive index.

    :param archive_path: directory containing an existing JSON link index to merge into
    :param import_path: optional file of new links to parse and validate
    :param only_new: if True, return only the links not already present in the archive
    :returns: list of validated link dicts (all links, or just the new ones when only_new)
    """
    all_links = []
    # parser_name must be pre-bound: validate_links() can change the link count
    # (e.g. dedupe), so num_new_links can be nonzero even with no import file,
    # and the print below would otherwise raise NameError
    parser_name = None
    if import_path:
        # parse and validate the import file
        raw_links, parser_name = parse_links(import_path)
        all_links = validate_links(raw_links)

    # merge existing links in archive_path and new links
    existing_links = []
    if archive_path:
        existing_links = parse_json_links_index(archive_path)
        all_links = validate_links(existing_links + all_links)

    num_new_links = len(all_links) - len(existing_links)
    if num_new_links and not only_new:
        print('{green}[+] [{}] Adding {} new links to index from {} ({} format){reset}'.format(
            datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
            num_new_links,
            pretty_path(import_path),
            parser_name,
            **ANSI,
        ))

    if only_new:
        return new_links(all_links, existing_links)

    return all_links