def write_html_links_index(out_dir, links):
    """Write the HTML link index (index.html) and static assets into out_dir.

    Renders one table row per link with the index-row template, then fills
    the page template with summary metadata and the rendered rows.

    Args:
        out_dir: directory to write index.html and the static/ folder into.
        links: iterable of link dicts accepted by derived_link_info().
    """
    path = os.path.join(out_dir, 'index.html')

    copy_tree(TEMPLATE_STATICFILES, os.path.join(out_dir, "static"))

    with open(INDEX_TEMPLATE, 'r', encoding='utf-8') as f:
        index_html = f.read()

    with open(INDEX_ROW_TEMPLATE, 'r', encoding='utf-8') as f:
        link_row_html = f.read()

    link_rows = '\n'.join(
        Template(link_row_html).substitute(**derived_link_info(link))
        for link in links
    )

    # Take a single timestamp so date_updated and time_updated can never
    # disagree (the previous code called datetime.now() twice, which could
    # straddle a minute/midnight boundary between the two calls).
    now = datetime.now()

    template_vars = {
        'num_links': len(links),
        'date_updated': now.strftime('%Y-%m-%d'),
        'time_updated': now.strftime('%Y-%m-%d %H:%M'),
        'footer_info': FOOTER_INFO,
        'git_sha': GIT_SHA,
        'short_git_sha': GIT_SHA[:8],
        'rows': link_rows,
    }

    with open(path, 'w', encoding='utf-8') as f:
        f.write(Template(index_html).substitute(**template_vars))

    chmod_file(path)
def write_html_link_index(out_dir, link):
    """Render the per-link detail page (index.html) for one link into out_dir."""
    check_link_structure(link)

    template_path = os.path.join(TEMPLATES_DIR, 'link_index.html')
    with open(template_path, 'r', encoding='utf-8') as f:
        link_html = f.read()

    path = os.path.join(out_dir, 'index.html')

    # Expand the raw link dict with its derived/computed fields before rendering.
    link = derived_link_info(link)
    archived = link['is_archived']

    if link['title']:
        title = link['title']
    elif archived:
        title = link['base_url']
    else:
        title = TITLE_LOADING_MSG

    fallback_url = link['domain'] if archived else 'about:blank'
    archive_url = urlencode(wget_output_path(link) or fallback_url)

    context = dict(
        link,
        title=title,
        archive_url=archive_url,
        extension=link['extension'] or 'html',
        tags=link['tags'].strip() or 'untagged',
        status='Archived' if archived else 'Not yet archived',
        status_color='success' if archived else 'danger',
    )

    with open(path, 'w', encoding='utf-8') as f:
        f.write(Template(link_html).substitute(context))

    chmod_file(path)
def write_html_links_index(out_dir, links):
    """Write the HTML link index (index.html) to out_dir.

    Args:
        out_dir: directory to write index.html into.
        links: iterable of link dicts accepted by derived_link_info().
    """
    path = os.path.join(out_dir, 'index.html')

    with open(INDEX_TEMPLATE, 'r', encoding='utf-8') as f:
        index_html = f.read()

    with open(INDEX_ROW_TEMPLATE, 'r', encoding='utf-8') as f:
        link_row_html = f.read()

    link_rows = '\n'.join(
        Template(link_row_html).substitute(**derived_link_info(link))
        for link in links
    )

    # Single datetime.now() call so the date and time fields always agree
    # (two separate calls could land on opposite sides of a minute/day boundary).
    now = datetime.now()

    template_vars = {
        'num_links': len(links),
        'date_updated': now.strftime('%Y-%m-%d'),
        'time_updated': now.strftime('%Y-%m-%d %H:%M'),
        'rows': link_rows,
    }

    with open(path, 'w', encoding='utf-8') as f:
        f.write(Template(index_html).substitute(**template_vars))

    chmod_file(path)
def write_html_link_index(out_dir, link):
    """Write the detail index.html page for a single link into out_dir."""
    check_link_structure(link)

    with open(os.path.join(TEMPLATES_DIR, 'link_index.html'), 'r', encoding='utf-8') as f:
        page_template = f.read()

    out_path = os.path.join(out_dir, 'index.html')
    print(' √ index.html')

    # Expand the link dict with its derived fields before substitution.
    info = derived_link_info(link)
    archived = info['is_archived']

    title = info['title'] or (info['base_url'] if archived else TITLE_LOADING_MSG)
    archive_url = wget_output_path(info) or (info['domain'] if archived else 'about:blank')

    rendered = Template(page_template).substitute({
        **info,
        'title': title,
        'archive_url': archive_url,
    })

    with open(out_path, 'w', encoding='utf-8') as f:
        f.write(rendered)

    chmod_file(out_path)
def write_html_links_index(out_dir, links, finished=False):
    """Write the HTML link index (index.html) to out_dir.

    Also copies the template static assets next to it and writes a
    robots.txt that blocks all crawlers.

    Args:
        out_dir: directory to write index.html, static/, and robots.txt into.
        links: iterable of link dicts accepted by derived_link_info().
        finished: when True the page status reads 'finished', else 'running'.
    """
    check_links_structure(links)
    path = os.path.join(out_dir, 'index.html')

    copy_tree(os.path.join(TEMPLATES_DIR, 'static'), os.path.join(out_dir, 'static'))

    # Keep archived snapshots out of search engines.
    # 'w' instead of 'w+': the file is only written, never read back.
    with open(os.path.join(out_dir, 'robots.txt'), 'w') as f:
        f.write('User-agent: *\nDisallow: /')

    with open(os.path.join(TEMPLATES_DIR, 'index.html'), 'r', encoding='utf-8') as f:
        index_html = f.read()

    with open(os.path.join(TEMPLATES_DIR, 'index_row.html'), 'r', encoding='utf-8') as f:
        link_row_html = f.read()

    full_links_info = (derived_link_info(link) for link in links)

    link_rows = '\n'.join(
        Template(link_row_html).substitute(**{
            **link,
            'title': (
                link['title']
                or (link['base_url'] if link['is_archived'] else TITLE_LOADING_MSG)
            ),
            # Dead commented-out fallback branch removed: favicon path is
            # unconditional regardless of archive state.
            'favicon_url': os.path.join('archive', link['timestamp'], 'favicon.ico'),
            'archive_url': urlencode(wget_output_path(link) or 'index.html'),
        })
        for link in full_links_info
    )

    # Single timestamp so date_updated and time_updated always agree.
    now = datetime.now()

    template_vars = {
        'num_links': len(links),
        'date_updated': now.strftime('%Y-%m-%d'),
        'time_updated': now.strftime('%Y-%m-%d %H:%M'),
        'footer_info': FOOTER_INFO,
        'git_sha': GIT_SHA,
        'short_git_sha': GIT_SHA[:8],
        'rows': link_rows,
        'status': 'finished' if finished else 'running',
    }

    with open(path, 'w', encoding='utf-8') as f:
        f.write(Template(index_html).substitute(**template_vars))

    chmod_file(path)
def write_html_link_index(out_dir, link):
    """Render the single-link detail page (index.html) into out_dir."""
    check_link_structure(link)

    template_file = os.path.join(TEMPLATES_DIR, 'link_index.html')
    with open(template_file, 'r', encoding='utf-8') as f:
        template_text = f.read()

    destination = os.path.join(out_dir, 'index.html')
    print(' √ index.html')

    rendered_page = Template(template_text).substitute(derived_link_info(link))

    with open(destination, 'w', encoding='utf-8') as f:
        f.write(rendered_page)

    chmod_file(destination)
def write_html_links_index(out_dir, links):
    """Write the HTML link index (index.html) to out_dir.

    Copies the template static assets alongside it and writes a robots.txt
    that blocks all crawlers.

    Args:
        out_dir: directory to write index.html, static/, and robots.txt into.
        links: iterable of link dicts accepted by derived_link_info().
    """
    check_links_structure(links)
    path = os.path.join(out_dir, 'index.html')

    copy_tree(os.path.join(TEMPLATES_DIR, 'static'), os.path.join(out_dir, 'static'))

    # Keep archived snapshots out of search engines.
    with open(os.path.join(out_dir, 'robots.txt'), 'w+') as f:
        f.write('User-agent: *\nDisallow: /')

    with open(os.path.join(TEMPLATES_DIR, 'index.html'), 'r', encoding='utf-8') as f:
        index_html = f.read()

    with open(os.path.join(TEMPLATES_DIR, 'index_row.html'), 'r', encoding='utf-8') as f:
        link_row_html = f.read()

    link_rows = '\n'.join(
        Template(link_row_html).substitute(**derived_link_info(link))
        for link in links
    )

    # Single datetime.now() call so the two timestamp fields can't disagree
    # (separate calls could straddle a minute/day boundary).
    now = datetime.now()

    template_vars = {
        'num_links': len(links),
        'date_updated': now.strftime('%Y-%m-%d'),
        'time_updated': now.strftime('%Y-%m-%d %H:%M'),
        'footer_info': FOOTER_INFO,
        'git_sha': GIT_SHA,
        'short_git_sha': GIT_SHA[:8],
        'rows': link_rows,
    }

    with open(path, 'w', encoding='utf-8') as f:
        f.write(Template(index_html).substitute(**template_vars))

    chmod_file(path)