def install_theme(argv):
    """Install the theme named in argv[0] from the user's local VenC share.

    The current "theme" folder is backed up under a timestamped name first,
    and restored if the requested theme does not exist.
    """
    if len(argv) < 1:
        die(messages.missing_params.format("--install-themes"))

    blog_configuration = get_blog_configuration()
    if blog_configuration is None:
        notify(messages.no_blog_configuration)
        return

    # Back up the current theme so it can be restored on failure.
    new_folder_name = "theme " + str(datetime.datetime.now()).replace(':', '-')
    try:
        shutil.move("theme", new_folder_name)
    except OSError:
        # Best effort: there may be no "theme" folder yet.  The original used
        # a bare except; narrowed so Ctrl-C is not swallowed.
        pass

    try:
        shutil.copytree(
            os.path.expanduser("~") + "/.local/share/VenC/themes/" + argv[0],
            "theme")
    except FileNotFoundError:
        # Restore previous state, then abort with a helpful message.
        try:
            shutil.move(new_folder_name, "theme")
            die(messages.theme_doesnt_exists.format("'" + argv[0] + "'"))
        except Exception as e:
            die(str(e))

    notify(messages.theme_installed)
def setup_category_context(self, i, root, len_root):
    """Prepare export paths for category node root[i].

    Returns (node, previous_export_path, previous_category_value) so the
    caller can restore state afterwards, or (None, None, None) when the
    node's thread is disabled.
    """
    node = root[i]
    if node.value in self.disable_threads:
        return (None, None, None)

    tree_special_char = '└' if i == len_root - 1 else '├'
    notify(self.indentation_level + tree_special_char + "─ " + node.value + "...")

    previous_export_path = self.export_path
    previous_category_value = self.category_value
    self.category_value += node.value + '/'
    self.export_path += str(node.value + '/').replace(' ', '-')

    # Relative path from the exported page back to the blog root.
    self.relative_origin = ''.join(
        '../' for segment in self.export_path.split("/")[1:] if segment != ''
    ).replace("//", '/')

    try:
        os.makedirs(self.export_path)
    except FileExistsError:
        pass

    return (node, previous_export_path, previous_category_value)
def remote_copy(argv=list()):
    """Upload the generated blog to the configured FTP host.

    Prompts interactively for credentials, wipes the remote destination,
    then mirrors the local "blog" directory onto it.
    """
    blog_configuration = get_blog_configuration()

    try:
        ftp = ftplib.FTP(blog_configuration["ftp_host"])
        ftp.encoding = 'latin-1'
    except socket.gaierror as e:
        die(str(e))

    username = input("VenC: " + messages.username)
    user_passwd = getpass.getpass(prompt="VenC: " + messages.user_passwd)

    try:
        ftp.login(user=username, passwd=user_passwd)
        ftp.cwd(blog_configuration["path"]["ftp"])
        notify(messages.clean_ftp_directory)
        ftp_clean_destination(ftp)
        notify(messages.copy_to_ftp_directory)
        ftp_export_recursively(os.getcwd() + "/blog", ftp)
    except TimeoutError as e:
        die(str(e))
    except ftplib.error_perm as e:
        die(str(e), color="YELLOW")
def __init__(self, prompt, datastore, theme, patterns_map):
    """Initialize the export thread: theme chunks, pagination and patterns."""
    self.indentation_level = "│ "
    self.patterns_map = patterns_map
    self.datastore = datastore

    # Tell the user which thread is being processed.
    if prompt != "":
        notify("├─ "+prompt)

    self.forbidden = patterns_map.non_contextual_entries_keys
    self.entries_per_page = int(datastore.blog_configuration["entries_per_pages"])
    self.disable_threads = datastore.disable_threads

    # Theme chunks are deep-copied so this thread cannot mutate the
    # shared theme instance.
    self.theme = theme
    self.footer = deepcopy(self.theme.footer)
    self.header = deepcopy(self.theme.header)
    self.entry = deepcopy(self.theme.entry)
    self.context_header = "header.html"
    self.context_footer = "footer.html"
    self.content_type = "html"
    self.column_opening = '<div id="__VENC_COLUMN_{0}__" class="__VENC_COLUMN__">'
    self.column_closing = "</div>"
    self.columns_number = self.datastore.blog_configuration["columns"]

    # Register contextual patterns with the processor.
    self.processor = Processor()
    for name, function in patterns_map.contextual["functions"].items():
        self.processor.set_function(name, function)
    for name, attribute_name in patterns_map.contextual["names"].items():
        self.processor.set_function(name, getattr(self, attribute_name))
def __init__(self, prompt, datastore, theme, patterns_map):
    """Set up one export thread's state and its pattern processor."""
    self.indentation_level = "│ "
    self.patterns_map = patterns_map
    self.datastore = datastore

    # Announce which thread is starting, unless the prompt is empty.
    if prompt != "":
        notify("├─ " + prompt)

    self.forbidden = patterns_map.non_contextual_entries_keys
    self.entries_per_page = int(datastore.blog_configuration["entries_per_pages"])
    self.disable_threads = datastore.disable_threads

    # Per-thread copies of the theme chunks (the theme is shared).
    self.theme = theme
    self.footer = deepcopy(theme.footer)
    self.header = deepcopy(theme.header)
    self.entry = deepcopy(theme.entry)
    self.context_header = "header.html"
    self.context_footer = "footer.html"
    self.content_type = "html"
    self.column_opening = '<div id="__VENC_COLUMN_{0}__" class="__VENC_COLUMN__">'
    self.column_closing = "</div>"
    self.columns_number = self.datastore.blog_configuration["columns"]

    # Wire every contextual pattern into this thread's processor.
    self.processor = Processor()
    contextual = patterns_map.contextual
    for pattern_name, pattern_function in contextual["functions"].items():
        self.processor.set_function(pattern_name, pattern_function)
    for pattern_name, method_name in contextual["names"].items():
        self.processor.set_function(pattern_name, getattr(self, method_name))
def new_entry(argv):
    """Create a new entry file named ID__M-D-Y-H-M__Title and open the editor.

    argv[0] is the entry title; optional argv[1] names a template in
    ./templates whose ".:GetEntryTitle:." placeholder is substituted.
    """
    if len(argv) < 1:
        die(messages.missing_params.format("--new-entry"))

    blog_configuration = get_blog_configuration()
    content = {"authors": "", "tags": "", "categories": "", "title": argv[0]}

    # Make sure the working directory is readable before going further.
    try:
        os.listdir(os.getcwd())
    except OSError:
        die(messages.cannot_read_in.format(os.getcwd()))

    # Single timestamp so metadata and filename cannot disagree when the
    # clock crosses a minute boundary (the original called now() twice).
    raw_entry_date = datetime.datetime.now()
    entry = dict()
    try:
        entry["ID"] = max([int(filename.split("__")[0]) for filename in yield_entries_content()]) + 1
    except ValueError:
        # No valid entry yet: start numbering at 1.
        entry["ID"] = 1

    entry["title"] = argv[0]
    entry["month"] = raw_entry_date.month
    entry["year"] = raw_entry_date.year
    entry["day"] = raw_entry_date.day
    entry["hour"] = raw_entry_date.hour
    entry["minute"] = raw_entry_date.minute
    entry["date"] = raw_entry_date

    entry_date = '-'.join([
        str(raw_entry_date.month),
        str(raw_entry_date.day),
        str(raw_entry_date.year),
        str(raw_entry_date.hour),
        str(raw_entry_date.minute)
    ])
    output_filename = os.getcwd()+'/entries/'+str(entry["ID"])+"__"+entry_date+"__"+entry["title"].replace(' ', '_')

    # Compute the output BEFORE opening the entry file, so a missing template
    # no longer leaves a stray empty entry behind.
    if len(argv) == 1:
        output = yaml.dump(content, default_flow_style=False, allow_unicode=True) + "---VENC-BEGIN-PREVIEW---\n---VENC-END-PREVIEW---\n"
    else:
        try:
            # Context manager closes the template handle (original leaked it).
            with open(os.getcwd()+'/templates/'+argv[1], 'r') as template:
                output = template.read().replace(".:GetEntryTitle:.", argv[0])
        except FileNotFoundError:
            die(messages.file_not_found.format(os.getcwd()+"/templates/"+argv[1]))

    with codecs.open(output_filename, 'w', encoding="utf-8") as stream:
        stream.write(output)

    try:
        command = [arg for arg in blog_configuration["text_editor"].split(' ') if arg != '']
        command.append(output_filename)
        subprocess.call(command)
    except FileNotFoundError:
        die(messages.unknown_command.format(blog_configuration["text_editor"]))

    notify(messages.entry_written)
def remote_copy(argv=list()):
    """Push the local "blog" directory to the configured remote FTP folder."""
    blog_configuration = get_blog_configuration()

    # Connect first; DNS failures abort immediately.
    try:
        ftp = ftplib.FTP(blog_configuration["ftp_host"])
        ftp.encoding = 'latin-1'
    except socket.gaierror as e:
        die(str(e))

    # Credentials are asked interactively, never stored.
    username = input("VenC: " + messages.username)
    user_passwd = getpass.getpass(prompt="VenC: " + messages.user_passwd)

    try:
        ftp.login(user=username, passwd=user_passwd)
        ftp.cwd(blog_configuration["path"]["ftp"])

        notify(messages.clean_ftp_directory)
        ftp_clean_destination(ftp)

        notify(messages.copy_to_ftp_directory)
        ftp_export_recursively(os.getcwd() + "/blog", ftp)
    except TimeoutError as e:
        die(str(e))
    except ftplib.error_perm as e:
        die(str(e), color="YELLOW")
def do(self):
    """Render every archive thread, then emit the matching JSON-LD documents."""
    len_archives = len(self.datastore.entries_per_archives)
    for i in range(0, len_archives):
        archive = self.setup_archive_context(i, len_archives)
        if archive is None:
            # Thread disabled for this archive: skip it.
            continue

        super().do()

        if self.datastore.enable_jsonld or self.datastore.enable_jsonp:
            from venc2.l10n import messages
            notify("│\t "+('│' if i != len_archives-1 else ' ')+" └─ "+messages.generating_jsonld_doc)
            blog_url = self.datastore.blog_configuration["blog_url"]
            archive_as_jsonld = self.datastore.archives_as_jsonld[archive.value]
            archive_as_jsonld["breadcrumb"]["itemListElement"].append({
                "@type": "ListItem",
                "position": 2,
                "item": {
                    "@id": blog_url+'/'+self.sub_folders+archive.value+"/archives.jsonld",
                    "url": blog_url+'/'+self.sub_folders+archive.value,
                    "name": self.datastore.blog_configuration["blog_name"]+' | '+archive.value
                }
            })
            archive_as_jsonld["@id"] = blog_url+'/'+self.sub_folders+archive.value+"/archives.jsonld"
            archive_as_jsonld["url"] = blog_url+'/'+self.sub_folders+archive.value

            # Context manager closes the output file (the original leaked
            # the handle).
            with open("blog/"+self.sub_folders+'/'+archive.value+"/archives.jsonld", 'w') as f:
                f.write(json.dumps(archive_as_jsonld))
def setup_category_context(self, i, root, len_root):
    """Enter category node root[i]: update paths and create its folder.

    Returns the node plus the pre-update export path and category value
    (so the caller can restore them), or a triple of None when disabled.
    """
    node = root[i]
    if node.value in self.disable_threads:
        return (None, None, None)

    if i == len_root - 1:
        branch_glyph = '└'
    else:
        branch_glyph = '├'
    notify(self.indentation_level + branch_glyph + "─ " + node.value + "...")

    saved_export_path = self.export_path
    saved_category_value = self.category_value

    self.category_value += node.value + '/'
    self.export_path += str(node.value + '/').replace(' ', '-')
    self.relative_origin = ''.join([
        '../' for part in self.export_path.split("/")[1:] if part != ''
    ]).replace("//", '/')

    try:
        os.makedirs(self.export_path)
    except FileExistsError:
        pass

    return (node, saved_export_path, saved_category_value)
def yield_entries_content():
    """Yield entry filenames from ./entries that match ID__M-D-Y-H-M__title.

    Invalid filenames are reported in yellow and skipped; a missing
    entries directory aborts.
    """
    try:
        filenames = os.listdir(os.getcwd()+"/entries")
    except FileNotFoundError:
        die(messages.file_not_found.format(os.getcwd()+"/entries"))

    for filename in filenames:
        exploded_filename = filename.split("__")
        try:
            date = exploded_filename[1].split('-')
            entry_id = int(exploded_filename[0])
            # Constructing the datetime validates the date fields.
            datetime.datetime(
                year=int(date[2]),
                month=int(date[0]),
                day=int(date[1]),
                hour=int(date[3]),
                minute=int(date[4])
            )
            if entry_id < 0:
                raise ValueError
        except (ValueError, IndexError):
            notify(messages.invalid_entry_filename.format(filename), "YELLOW")
            continue
        yield filename
def do_jsonld(self, node, tree_special_char):
    """Write the JSON-LD document for the current category, with breadcrumb."""
    from venc2.l10n import messages
    import json
    notify(self.indentation_level+tree_special_char+' '+(' ├─ ' if len(node.childs) else ' └─ ')+messages.generating_jsonld_doc)

    blog_url = self.datastore.blog_configuration["blog_url"]
    category_as_jsonld = self.datastore.categories_as_jsonld[self.category_value]

    # Build one breadcrumb item per sub-category of the current path.
    position = 2
    category_breadcrumb_path = ''
    for sub_category in self.category_value.split('/'):
        category_breadcrumb_path += sub_category+'/'
        category_as_jsonld["breadcrumb"]["itemListElement"].append({
            "@type": "ListItem",
            "position": position,
            "item": {
                "@id": blog_url+'/'+self.sub_folders+category_breadcrumb_path+"categories.jsonld",
                "url": blog_url+'/'+self.sub_folders+category_breadcrumb_path,
                "name": self.datastore.blog_configuration["blog_name"]+' | '+sub_category
            }
        })
        position += 1

    category_as_jsonld["@id"] = blog_url+'/'+self.sub_folders+self.category_value+"categories.jsonld"
    category_as_jsonld["url"] = blog_url+'/'+self.sub_folders+self.category_value

    # Context manager closes the file (the original leaked the handle).
    with open(("blog/"+self.sub_folders+self.category_value+"categories.jsonld").replace(' ', '-'), 'w') as f:
        f.write(json.dumps(category_as_jsonld))
def build_categories_tree(entry_index, input_list, output_tree, output_leaves, max_weight, set_max_weight=None, encoding="utf-8", sub_folders=''):
    """Merge each ' > '-separated category path of input_list into output_tree.

    New nodes get a URL-quoted .path; existing nodes get their count bumped
    and entry_index appended to related_to.  Leaf nodes are collected into
    output_leaves when provided.  Returns nothing; mutates the tree in place.
    """
    for category in input_list:
        branch = category.split(' > ')
        if not len(branch):
            continue
        leave = branch[-1]
        path = sub_folders
        root = output_tree
        for node_name in branch:
            if node_name == '':
                continue
            path += str(node_name+'/').replace(' ', '-')
            if node_name not in [metadata.value for metadata in root]:
                # First time this node appears at this depth: create it.
                root.append(MetadataNode(node_name, entry_index))
                if output_leaves is not None and node_name == leave:
                    output_leaves.append(root[-1])
                try:
                    root[-1].path = ".:GetRelativeOrigin:."+urllib.parse.quote(path, encoding=encoding)
                except UnicodeEncodeError as e:
                    # Fall back to the raw path and warn.
                    root[-1].path = ".:GetRelativeOrigin:."+path
                    notify("\"{0}\": ".format(root[-1].path)+str(e), color="YELLOW")
                root = root[-1].childs
            else:
                # Node already exists: update weight and descend into it.
                for node in root:
                    if node.value == node_name:
                        node.count += 1
                        if set_max_weight is not None and node.count > max_weight:
                            max_weight = set_max_weight(node.count)
                        node.related_to.append(entry_index)
                        root = node.childs
def do(self):
    """Render every dated archive thread and emit its JSON-LD document."""
    len_archives = len(self.datastore.entries_per_dates)
    for i in range(0, len_archives):
        archive = self.setup_archive_context(i, len_archives)
        if archive is None:
            # Thread disabled for this date: skip it.
            continue

        super().do()

        # NOTE(review): the sibling implementation tests
        # self.datastore.enable_jsonp here; confirm whether self.enable_jsonp
        # is intentional.
        if self.datastore.enable_jsonld or self.enable_jsonp:
            from venc2.l10n import messages
            notify("│\t "+('│' if i != len_archives-1 else ' ')+" └─ "+messages.generating_jsonld_doc)
            blog_url = self.datastore.blog_configuration["blog_url"]
            archive_as_jsonld = self.datastore.archives_as_jsonld[archive.value]
            archive_as_jsonld["breadcrumb"]["itemListElement"].append({
                "@type": "ListItem",
                "position": 2,
                "item": {
                    "@id": blog_url+'/'+self.sub_folders+archive.value+"/archives.jsonld",
                    "url": blog_url+'/'+self.sub_folders+archive.value,
                    "name": self.datastore.blog_configuration["blog_name"]+' | '+archive.value
                }
            })
            archive_as_jsonld["@id"] = blog_url+'/'+self.sub_folders+archive.value+"/archives.jsonld"
            archive_as_jsonld["url"] = blog_url+'/'+self.sub_folders+archive.value

            # Context manager closes the file (the original leaked the handle).
            with open("blog/"+self.sub_folders+'/'+archive.value+"/archives.jsonld", 'w') as f:
                f.write(json.dumps(archive_as_jsonld))
def new_entry(argv):
    """Create a new entry file and open it in the configured text editor.

    argv[0] is the title; optional argv[1] is a template under ./templates.
    """
    blog_configuration = get_blog_configuration()
    if len(argv) < 1:
        die(messages.missing_params.format("--new-entry"))

    content = {"authors": "", "tags": "", "categories": "", "title": argv[0]}

    # Verify the working directory is readable.
    try:
        os.listdir(os.getcwd())
    except OSError:
        die(messages.cannot_read_in.format(os.getcwd()))

    # One timestamp for both the metadata and the filename (the original
    # took two separate now() snapshots that could straddle a minute).
    raw_entry_date = datetime.datetime.now()
    entry = dict()
    try:
        entry["ID"] = max([int(filename.split("__")[0]) for filename in yield_entries_content()]) + 1
    except ValueError:
        entry["ID"] = 1

    entry["title"] = argv[0]
    entry["month"] = raw_entry_date.month
    entry["year"] = raw_entry_date.year
    entry["day"] = raw_entry_date.day
    entry["hour"] = raw_entry_date.hour
    entry["minute"] = raw_entry_date.minute
    entry["date"] = raw_entry_date

    entry_date = '-'.join([
        str(raw_entry_date.month),
        str(raw_entry_date.day),
        str(raw_entry_date.year),
        str(raw_entry_date.hour),
        str(raw_entry_date.minute)
    ])
    output_filename = os.getcwd()+'/entries/'+str(entry["ID"])+"__"+entry_date+"__"+entry["title"].replace(' ', '_')

    # Build the content before touching the entry file, so a missing template
    # cannot leave a stray empty entry behind.
    if len(argv) == 1:
        output = yaml.dump(content, default_flow_style=False, allow_unicode=True) + "---VENC-BEGIN-PREVIEW---\n---VENC-END-PREVIEW---\n"
    else:
        try:
            # with-statement closes the template handle (original leaked it).
            with open(os.getcwd()+'/templates/'+argv[1], 'r') as template:
                output = template.read().replace(".:GetEntryTitle:.", argv[0])
        except FileNotFoundError:
            die(messages.file_not_found.format(os.getcwd()+"/templates/"+argv[1]))

    with codecs.open(output_filename, 'w', encoding="utf-8") as stream:
        stream.write(output)

    try:
        command = [arg for arg in blog_configuration["text_editor"].split(' ') if arg != '']
        command.append(output_filename)
        subprocess.call(command)
    except FileNotFoundError:
        die(messages.unknown_command.format(blog_configuration["text_editor"]))

    notify(messages.entry_written)
def yield_entries_content():
    """Generator over valid entry filenames found in ./entries.

    A filename is valid when it parses as ID__M-D-Y-H-M__..., the date is a
    real calendar date and the ID is non-negative; others are warned about.
    """
    entries_dir = os.getcwd() + "/entries"
    try:
        listing = os.listdir(entries_dir)
    except FileNotFoundError:
        die(messages.file_not_found.format(entries_dir))

    for filename in listing:
        fields = filename.split("__")
        try:
            entry_id = int(fields[0])
            d = fields[1].split('-')
            # datetime() raises ValueError on impossible dates.
            datetime.datetime(
                year=int(d[2]),
                month=int(d[0]),
                day=int(d[1]),
                hour=int(d[3]),
                minute=int(d[4])
            )
            if entry_id >= 0:
                yield filename
            else:
                raise ValueError
        except ValueError:
            notify(messages.invalid_entry_filename.format(filename), "YELLOW")
        except IndexError:
            notify(messages.invalid_entry_filename.format(filename), "YELLOW")
def __init__(self):
    """Load the blog configuration and cache frequently used settings."""
    notify("┌─ "+messages.loading_data)
    self.blog_configuration = get_blog_configuration()
    self.sort_by = self.blog_configuration["sort_by"]
    self.enable_jsonld = self.blog_configuration["enable_jsonld"]
    self.enable_jsonp = self.blog_configuration["enable_jsonp"]
    # Fixed: the original read the bare name `blog_configuration`, which is
    # undefined in this scope (NameError at runtime).
    self.blog_url = self.blog_configuration["blog_url"]
def do(self, entries, export_path, relative_origin, indentation_level, tree_special_char):
    """Point the thread at the given entries/paths and run the parent export."""
    progress_message = getattr(messages, "generating_" + self.content_type)
    notify(indentation_level + tree_special_char + "─ " + progress_message)
    self.export_path = export_path
    self.relative_origin = relative_origin
    self.organize_entries(entries)
    super().do()
def handle_markup_language_error(message, line=None, string=None):
    """Report a markup error once, highlighting the offending source line.

    Duplicate messages are suppressed via the module-level
    markup_language_errors list.
    """
    if message not in markup_language_errors:
        notify(message, "RED")
        markup_language_errors.append(message)
        # NOTE(review): assumes the context dump belongs inside the
        # de-duplication guard (shown only the first time) — confirm.
        if line is not None and string is not None:
            for lineno, text in enumerate(string.split('\n')):
                if line - 1 == lineno:
                    # ANSI escape: paint the faulty line red.
                    print('\033[91m'+text+'\033[0m')
                else:
                    print(text)
def handle_markup_language_error(message, line=None, string=None):
    """Print a markup-language error once, with the bad line shown in red."""
    if message not in markup_language_errors:
        notify(message, "RED")
        markup_language_errors.append(message)
        # NOTE(review): nesting reconstructed from mangled source — the
        # context dump is assumed to run only on first report; confirm.
        if line is not None and string is not None:
            for index, content in enumerate(string.split('\n')):
                # `line` is 1-based; enumerate is 0-based.
                if line - 1 == index:
                    print('\033[91m'+content+'\033[0m')
                else:
                    print(content)
def build_categories_tree(entry_index, input_list, output_tree, output_leaves, max_weight, set_max_weight=None, encoding="utf-8", sub_folders=''):
    """Merge ' > '-separated category paths into output_tree (quirk-encoded).

    New nodes receive a "\\x1a"-prefixed encoded path; existing nodes get
    their count bumped and entry_index recorded.  Leaves are appended to
    output_leaves when provided.  Mutates the tree in place.
    """
    for category in input_list:
        branch = category.split(' > ')
        if not len(branch):
            continue
        leave = branch[-1]
        path = sub_folders
        root = output_tree
        for node_name in branch:
            if node_name == '':
                continue
            path += quirk_encoding(str(node_name + '/'))
            if node_name not in [metadata.value for metadata in root]:
                # New node at this depth.
                root.append(MetadataNode(node_name, entry_index))
                if output_leaves is not None and node_name == leave:
                    output_leaves.append(root[-1])
                try:
                    if encoding == '':
                        # Empty encoding means: transliterate to ASCII.
                        root[-1].path = "\x1a" + quirk_encoding(unidecode.unidecode(path))
                    else:
                        root[-1].path = "\x1a" + urllib.parse.quote(path, encoding=encoding)
                except UnicodeEncodeError as e:
                    # Fall back to the raw path and warn.
                    root[-1].path = "\x1a" + path
                    notify("\"{0}\": ".format(root[-1].path) + str(e), color="YELLOW")
                root = root[-1].childs
            else:
                # Existing node: update weight and descend.
                for node in root:
                    if node.value == node_name:
                        node.count += 1
                        if set_max_weight is not None and node.count > max_weight:
                            max_weight = set_max_weight(node.count)
                        node.related_to.append(entry_index)
                        root = node.childs
def copy_recursively(src, dest):
    """Copy every item of src into dest, recursing into directories.

    Failures are reported in yellow rather than aborting the export.
    """
    import errno
    for filename in os.listdir(src):
        source_item = src + filename
        target_item = dest + filename
        try:
            shutil.copytree(source_item, target_item)
        except shutil.Error as e:
            notify(messages.directory_not_copied % e, "YELLOW")
        except OSError as e:
            if e.errno == errno.ENOTDIR:
                # Plain file: fall back to a simple copy.
                shutil.copy(source_item, target_item)
            else:
                notify(messages.directory_not_copied % e, "YELLOW")
def do(self):
    """Iterate every page and entry, running the per-entry export hooks."""
    if self.datastore.enable_jsonld or self.enable_jsonp:
        notify(self.indentation_level+'└─ '+messages.generating_jsonld_docs)

    self.page_number = 0
    self.current_page = 0
    if not len(self.pages):
        return

    for page in self.pages:
        for entry in page:
            # Hooks run in a fixed order around each entry.
            self.setup_context(entry)
            self.pre_iteration()
            self.do_iteration(entry)
            self.post_iteration()
            if self.datastore.enable_jsonld:
                self.do_jsonld(entry)
def copy_recursively(src, dest):
    """Best-effort recursive copy of src's contents into dest.

    Directories are copied with copytree; plain files with copy; any other
    failure is only warned about so the rest of the export continues.
    """
    import errno
    for filename in os.listdir(src):
        try:
            shutil.copytree(src + filename, dest + filename)
        except shutil.Error as copy_error:
            notify(messages.directory_not_copied % copy_error, "YELLOW")
        except OSError as os_error:
            if os_error.errno != errno.ENOTDIR:
                notify(messages.directory_not_copied % os_error, "YELLOW")
            else:
                # Not a directory: copy it as a regular file.
                shutil.copy(src + filename, dest + filename)
def do(self):
    """Walk all pages, exporting each entry through the iteration hooks."""
    if self.datastore.enable_jsonld or self.enable_jsonp:
        notify(self.indentation_level + '└─ ' + messages.generating_jsonld_docs)

    self.page_number = 0
    self.current_page = 0
    if len(self.pages):
        for current_page in self.pages:
            for current_entry in current_page:
                self.setup_context(current_entry)
                self.pre_iteration()
                self.do_iteration(current_entry)
                self.post_iteration()
                # JSON-LD is emitted per entry only when enabled.
                if self.datastore.enable_jsonld:
                    self.do_jsonld(current_entry)
def do_jsonld(self):
    """Write the root JSON-LD document, plus a JSONP wrapper when enabled."""
    if self.datastore.enable_jsonld or self.datastore.enable_jsonp:
        from venc2.prompt import notify
        from venc2.l10n import messages
        import json
        notify(self.indentation_level+'└─ '+messages.generating_jsonld_doc)
        dump = json.dumps(self.datastore.root_as_jsonld)
        # TODO RUN AS PARENT CLASS METHOD
        if self.datastore.enable_jsonld:
            with open("blog/root.jsonld", 'w') as f:
                f.write(dump)
        if self.datastore.enable_jsonp:
            # Fixed: original line was syntactically broken ("if ... enable_jsonp"
            # lacked a colon and the callback name was the invalid "self.+").
            # Use a blog-URL-derived callback name, consistent with the sibling
            # implementation of this method.
            import hashlib
            url_digest = hashlib.sha512(self.datastore.blog_url.encode('utf-8'))
            with open("blog/root.jsonp", 'w') as f:
                f.write("function _"+url_digest.hexdigest()+"() {return "+dump+";}")
def rm_tree_error_handler(function, path, excinfo):
    """shutil.rmtree error callback: tolerate a missing blog folder, die otherwise."""
    missing_blog_folder = (path == "blog" and excinfo[0] == FileNotFoundError)
    if missing_blog_folder:
        # First export: nothing to clean, just warn.
        notify(messages.blog_folder_doesnt_exists, "YELLOW")
        return

    # Anything else is unexpected: dump the details in red and stop.
    notify(str(function), "RED")
    notify(str(path), "RED")
    notify(str(excinfo[0]), "RED")
    exit()
def setup_archive_context(self, i, len_archives):
    """Prepare export path and entry set for the i-th dated archive.

    Returns the archive node, or None when its thread is disabled.
    """
    archive = self.datastore.entries_per_dates[i]
    if archive.value in self.disable_threads:
        return None

    branch_glyph = '└' if i == len_archives - 1 else '├'
    notify("│\t " + branch_glyph + "─ " + archive.value + "...")

    self.export_path = str("blog/" + self.sub_folders + '/' + archive.value + '/').replace(' ', '-')
    os.makedirs(self.export_path)

    selected_entries = [
        entry for entry in self.datastore.get_entries_for_given_date(
            archive.value,
            self.datastore.blog_configuration["reverse_thread_order"]
        )
    ]
    self.organize_entries(selected_entries)
    return archive
def serv_blog(argv=list()):
    """Serve the generated blog over HTTP on the configured port."""
    try:
        os.chdir("blog/")
    except FileNotFoundError:
        # Fixed: the original tried httpd.server_close() here, but httpd was
        # never created when chdir fails — that raised a NameError.
        return

    try:
        PORT = int(blog_configuration["server_port"])
    except ValueError:
        die(messages.server_port_is_invalid.format(blog_configuration["server_port"]))

    server_address = ("", PORT)
    notify(messages.serving_blog.format(PORT))
    httpd = http.server.HTTPServer(server_address, VenCServer)
    try:
        httpd.serve_forever()
    except (KeyboardInterrupt, FileNotFoundError):
        # Ctrl-C (or a vanished working tree) stops the server cleanly.
        httpd.server_close()
def try_oembed(providers, url):
    """Fetch the oEmbed HTML snippet for url and cache it under caches/embed.

    Raises PatternInvalidArgument for unknown providers and GenericMessage
    for network/format failures.  Returns the HTML string.
    """
    try:
        key = [key for key in providers["oembed"].keys() if url.netloc in key][0]
    except IndexError:
        raise PatternInvalidArgument(
            "url", url.geturl(),
            messages.unknown_provider.format(url.netloc))

    try:
        r = requests.get(providers["oembed"][key][0], params={
            "url": url.geturl(),
            "format": "json"
        })
    except requests.exceptions.ConnectionError as e:
        raise GenericMessage(messages.connectivity_issue + '\n' + str(e))

    if r.status_code != 200:
        raise GenericMessage(messages.ressource_unavailable.format(url.geturl()))

    try:
        html = json.loads(r.text)["html"]
    except Exception:
        raise GenericMessage(messages.response_is_not_json.format(url.geturl()))

    # Cache keyed by URL digest; failure to cache is non-fatal.
    cache_filename = hashlib.md5(url.geturl().encode('utf-8')).hexdigest()
    try:
        os.makedirs("caches/embed", exist_ok=True)
        with open("caches/embed/" + cache_filename, "w") as f:
            f.write(html)
    except PermissionError:
        notify(messages.wrong_permissions.format("caches/embed/" + cache_filename), color="YELLOW")

    return html
def serv_blog(argv=list()):
    """Start a simple HTTP server rooted at ./blog on the configured port."""
    try:
        os.chdir("blog/")
    except FileNotFoundError:
        # Fixed: the original called httpd.server_close() in this case, but
        # httpd does not exist yet when chdir fails (NameError).
        return

    try:
        PORT = int(blog_configuration["server_port"])
    except ValueError:
        die(
            messages.server_port_is_invalid.format(
                blog_configuration["server_port"]))

    server_address = ("", PORT)
    notify(messages.serving_blog.format(PORT))
    httpd = http.server.HTTPServer(server_address, VenCServer)
    try:
        httpd.serve_forever()
    except (KeyboardInterrupt, FileNotFoundError):
        httpd.server_close()
def worker_process_non_contextual_entry_patterns(shared_data, worker_id):
    """Record each entry of this worker's chunk as the currently requested one.

    NOTE(review): everything after the original early ``return`` was
    unreachable dead code that also referenced undefined names
    (``datastore``, ``pattern_processor``, ``theme``); it has been removed.
    The ``print`` of the shared requested-entry table looks like debug
    output — confirm whether it should stay.
    """
    for entry in shared_data["datastore"].entries[worker_id * (chunks_len):(worker_id + 1) * (chunks_len)]:
        shared_data["datastore"].cpu_threads_requested_entry[worker_id] = entry
    print(shared_data["datastore"].cpu_threads_requested_entry)
    return
def ftp_export_recursively(origin, ftp):
    """Mirror the local `origin` directory into the FTP server's current dir."""
    folder = os.listdir(origin)
    for item in folder:
        if os.path.isdir(origin+"/"+item):
            try:
                try:
                    ftp.mkd(item)
                except ftplib.error_perm as e:
                    # The remote directory may already exist; anything else
                    # is a real error.
                    if ": File exists" not in str(e.args):
                        raise
                ftp.cwd(ftp.pwd()+"/"+item)
                ftp_export_recursively(origin+"/"+item, ftp)
                ftp.cwd(ftp.pwd()[:-len("/"+item)])
            except Exception as e:
                notify(item+": "+str(e), color="YELLOW")
        else:
            # Context manager closes the local file (the original leaked
            # one handle per uploaded file).
            with open(origin+"/"+item, 'rb') as stream:
                ftp.storbinary("STOR "+ftp.pwd()+"/"+item, stream)
def ftp_clean_destination(ftp):
    """Recursively delete everything in the FTP server's current directory.

    Tries DELE first, then RMD; if both fail the item is assumed to be a
    non-empty directory and is descended into.
    """
    listing = ftp.nlst()
    for item in listing:
        if item not in ['.', '..']:
            try:
                ftp.delete(item)
                notify(messages.item_deleted_from_server + ftp.pwd() + "/" + item)
            except Exception:
                try:
                    ftp.rmd(item)
                    notify(messages.item_deleted_from_server + ftp.pwd() + "/" + item)
                except Exception:
                    # Fixed: was a bare `except:` which also swallowed
                    # KeyboardInterrupt/SystemExit.
                    ftp.cwd(ftp.pwd() + "/" + item)
                    ftp_clean_destination(ftp)
                    ftp.cwd(ftp.pwd()[:-len("/" + item)])
def setup_archive_context(self, i, len_archives):
    """Prepare paths and entries for the i-th archive; None when disabled."""
    archive = self.datastore.entries_per_archives[i]
    if archive.value in self.disable_threads:
        return None

    self.thread_name = archive.value
    glyph = '└' if i == len_archives - 1 else '├'
    notify("│\t " + glyph + "─ " + archive.value + "...")

    # Encode the destination path the same way links are encoded.
    raw_path = str("blog/" + self.sub_folders + '/' + archive.value + '/')
    self.export_path = self.path_encode(raw_path)
    os.makedirs(self.export_path)

    entries_for_archive = [
        entry for entry in self.datastore.get_entries_for_given_date(
            archive.value,
            self.datastore.blog_configuration["reverse_thread_order"]
        )
    ]
    self.organize_entries(entries_for_archive)
    return archive
def do_jsonld(self, node, tree_special_char):
    """Emit the JSON-LD document of the current category with its breadcrumb."""
    from venc2.l10n import messages
    import json
    notify(self.indentation_level + tree_special_char + ' ' +
           (' ├─ ' if len(node.childs) else ' └─ ') +
           messages.generating_jsonld_doc)

    blog_url = self.datastore.blog_configuration["blog_url"]
    category_as_jsonld = self.datastore.categories_as_jsonld[self.category_value]

    # One breadcrumb item per sub-category along the current path.
    position = 2
    category_breadcrumb_path = ''
    for sub_category in self.category_value.split('/'):
        category_breadcrumb_path += sub_category + '/'
        category_as_jsonld["breadcrumb"]["itemListElement"].append({
            "@type": "ListItem",
            "position": position,
            "item": {
                "@id": blog_url + '/' + self.sub_folders + category_breadcrumb_path + "categories.jsonld",
                "url": blog_url + '/' + self.sub_folders + category_breadcrumb_path,
                "name": self.datastore.blog_configuration["blog_name"] + ' | ' + sub_category
            }
        })
        position += 1

    category_as_jsonld["@id"] = blog_url + '/' + self.sub_folders + self.category_value + "categories.jsonld"
    category_as_jsonld["url"] = blog_url + '/' + self.sub_folders + self.category_value

    # Context manager closes the output file (the original leaked the handle).
    with open(("blog/" + self.sub_folders + self.category_value + "categories.jsonld").replace(' ', '-'), 'w') as f:
        f.write(json.dumps(category_as_jsonld))
def ftp_export_recursively(origin, ftp):
    """Recursively upload the contents of `origin` to the FTP server."""
    folder = os.listdir(origin)
    for item in folder:
        if os.path.isdir(origin + "/" + item):
            try:
                try:
                    ftp.mkd(item)
                except ftplib.error_perm as e:
                    # Ignore "File exists": the remote dir is already there.
                    if ": File exists" not in str(e.args):
                        raise
                ftp.cwd(ftp.pwd() + "/" + item)
                ftp_export_recursively(origin + "/" + item, ftp)
                ftp.cwd(ftp.pwd()[:-len("/" + item)])
            except Exception as e:
                notify(item + ": " + str(e), color="YELLOW")
        else:
            # with-statement closes the local file; the original opened it
            # inline and never closed it.
            with open(origin + "/" + item, 'rb') as stream:
                ftp.storbinary("STOR " + ftp.pwd() + "/" + item, stream)
def try_oembed(providers, url):
    """Resolve an oEmbed HTML snippet for `url`, caching it on disk.

    Raises PatternInvalidArgument when no provider matches url.netloc, and
    GenericMessage on connectivity/HTTP/JSON failures.
    """
    try:
        key = [
            key for key in providers["oembed"].keys() if url.netloc in key][0]
    except IndexError:
        raise PatternInvalidArgument("url", url.geturl(), messages.unknown_provider.format(url.netloc))

    try:
        r = requests.get(providers["oembed"][key][0], params={
            "url": url.geturl(),
            "format": "json"
        })
    except requests.exceptions.ConnectionError as e:
        raise GenericMessage(messages.connectivity_issue+'\n'+str(e))

    if r.status_code != 200:
        raise GenericMessage(messages.ressource_unavailable.format(url.geturl()))

    try:
        html = json.loads(r.text)["html"]
    except Exception:
        raise GenericMessage(messages.response_is_not_json.format(url.geturl()))

    # Best-effort cache write; permission problems only warn.
    cache_filename = hashlib.md5(url.geturl().encode('utf-8')).hexdigest()
    try:
        os.makedirs("caches/embed", exist_ok=True)
        # Context manager closes the cache file (the original never did).
        with open("caches/embed/"+cache_filename, "w") as f:
            f.write(html)
    except PermissionError:
        notify(messages.wrong_permissions.format("caches/embed/"+cache_filename), color="YELLOW")

    return html
def do_jsonld(self):
    """Write the root JSON-LD document and, when enabled, its JSONP wrapper."""
    if self.datastore.enable_jsonld or self.datastore.enable_jsonp:
        from venc2.prompt import notify
        from venc2.l10n import messages
        import json
        dump = json.dumps(self.datastore.root_as_jsonld)
        # TODO RUN AS PARENT CLASS METHOD
        if self.datastore.enable_jsonld:
            notify(self.indentation_level +
                   ('└─ ' if not self.datastore.enable_jsonp else '├─ ') +
                   messages.generating_jsonld_doc)
            # Context manager closes the file (the original leaked the handle).
            with open("blog/root.jsonld", 'w') as f:
                f.write(dump)

        if self.datastore.enable_jsonp:
            notify(self.indentation_level + '└─ ' + messages.generating_jsonp_doc)
            import hashlib
            # Callback name is derived from the blog URL so it is stable
            # and unique per blog.
            url_digest = hashlib.sha512(self.datastore.blog_url.encode('utf-8'))
            with open("blog/root.jsonp", 'w') as f:
                f.write("function _" + url_digest.hexdigest() + "() {return " + dump + ";}")
def export_blog(argv=list()):
    """Run the export pipeline: pre-process, render threads, copy assets."""
    theme, theme_folder = init_theme(argv)
    patterns_map = PatternsMap(datastore, code_highlight, theme)
    pattern_processor = setup_pattern_processor(patterns_map)

    notify("├─ "+messages.pre_process)
    process_non_contextual_patterns(pattern_processor, theme)

    # Wipe and recreate the output directory.
    shutil.rmtree("blog", ignore_errors=False, onerror=rm_tree_error_handler)
    os.makedirs("blog")

    # Second pass: render each enabled thread.
    from venc2.threads.main import MainThread
    export_thread = MainThread(messages.export_main_thread, datastore, theme, patterns_map)
    export_thread.do()

    if not datastore.blog_configuration["disable_archives"]:
        from venc2.threads.archives import ArchivesThread
        export_thread = ArchivesThread(messages.export_archives, datastore, theme, patterns_map)
        export_thread.do()

    if not datastore.blog_configuration["disable_categories"]:
        from venc2.threads.categories import CategoriesThread
        export_thread = CategoriesThread(messages.export_categories, datastore, theme, patterns_map)
        export_thread.do()

    if not datastore.blog_configuration["disable_single_entries"]:
        from venc2.threads.entries import EntriesThread
        export_thread = EntriesThread(messages.export_single_entries, datastore, theme, patterns_map)
        export_thread.do()

    # Finally, copy style sheets, extra files and theme assets.
    notify('└─ '+messages.copy_assets_and_extra_files)
    code_highlight.export_style_sheets()
    copy_recursively("extra/","blog/")
    copy_recursively(theme_folder+"assets/","blog/")

    notify(messages.task_done_in_n_seconds.format(round(time.time() - start_timestamp,6)))
def export_blog(argv=list()):
    """Export the blog: patterns pre-pass, thread rendering, asset copy."""
    theme, theme_folder = init_theme(argv)
    patterns_map = PatternsMap(datastore, code_highlight, theme)
    pattern_processor = setup_pattern_processor(patterns_map)

    notify("├─ "+messages.pre_process)
    process_non_contextual_patterns(pattern_processor, theme)

    # Start from a clean output tree.
    shutil.rmtree("blog", ignore_errors=False, onerror=rm_tree_error_handler)
    os.makedirs("blog")

    # Main thread always runs; the others honour their disable flags.
    from venc2.threads.main import MainThread
    thread = MainThread(messages.export_main_thread, datastore, theme, patterns_map)
    thread.do()

    config = datastore.blog_configuration
    if not config["disable_archives"]:
        from venc2.threads.archives import ArchivesThread
        thread = ArchivesThread(messages.export_archives, datastore, theme, patterns_map)
        thread.do()

    if not config["disable_categories"]:
        from venc2.threads.categories import CategoriesThread
        thread = CategoriesThread(messages.export_categories, datastore, theme, patterns_map)
        thread.do()

    if not config["disable_single_entries"]:
        from venc2.threads.entries import EntriesThread
        thread = EntriesThread(messages.export_single_entries, datastore, theme, patterns_map)
        thread.do()

    # Copy assets and extra files.
    notify('└─ '+messages.copy_assets_and_extra_files)
    code_highlight.export_style_sheets()
    copy_recursively("extra/","blog/")
    copy_recursively(theme_folder+"assets/","blog/")

    notify(messages.task_done_in_n_seconds.format(round(time.time() - start_timestamp,6)))
def export_blog(argv=list()):
    """Run a full static export of the blog into the local "blog/" directory.

    Lazily builds the global DataStore and CodeHighlight singletons,
    pre-processes non-contextual patterns, wipes and recreates "blog/",
    runs every enabled rendering thread (main, archives, categories,
    single entries, chapters), then copies style sheets, extra files,
    theme assets and theme asset dependencies.

    argv -- optional command-line arguments forwarded to init_theme().
    """
    global datastore
    if datastore == None:
        datastore = DataStore()

    global code_highlight
    code_highlight = CodeHighlight(
        datastore.blog_configuration["code_highlight_css_override"])

    theme, theme_folder = init_theme(argv)
    patterns_map = PatternsMap(datastore, code_highlight, theme)
    pattern_processor = setup_pattern_processor(patterns_map)

    # First pass: patterns that do not depend on a per-entry/per-page context.
    notify("├─ " + messages.pre_process)
    process_non_contextual_patterns(pattern_processor, theme, patterns_map)

    # BUG FIX: removed leftover debug statements `PRINT("CUT")` / `exit(0)`.
    # PRINT is an undefined name (NameError at runtime) and the early exit
    # aborted the whole export before anything was written to disk.

    # cleaning directory
    shutil.rmtree("blog", ignore_errors=False, onerror=rm_tree_error_handler)
    os.makedirs("blog")

    # Starting second pass and exporting.
    # Thread modules are imported locally so disabled ones are never loaded.
    from venc2.threads.main import MainThread
    thread = MainThread(messages.export_main_thread, datastore, theme, patterns_map)
    thread.do()

    if not datastore.blog_configuration["disable_archives"]:
        from venc2.threads.archives import ArchivesThread
        thread = ArchivesThread(messages.export_archives, datastore, theme, patterns_map)
        thread.do()

    if not datastore.blog_configuration["disable_categories"]:
        from venc2.threads.categories import CategoriesThread
        thread = CategoriesThread(messages.export_categories, datastore, theme, patterns_map)
        thread.do()

    if not datastore.blog_configuration["disable_single_entries"]:
        from venc2.threads.entries import EntriesThread
        thread = EntriesThread(messages.export_single_entries, datastore, theme, patterns_map)
        thread.do()

    if not datastore.blog_configuration["disable_chapters"]:
        from venc2.threads.chapters import ChaptersThread
        thread = ChaptersThread(messages.export_chapters, datastore, theme, patterns_map)
        thread.do()

    # Copy assets and extra files
    notify('└─ ' + messages.copy_assets_and_extra_files)
    code_highlight.export_style_sheets()
    copy_recursively("extra/", "blog/")
    copy_recursively(theme_folder + "assets/", "blog/")

    # Extra assets shipped system-wide; a dependency may be a single file or
    # a whole directory, hence the IsADirectoryError fallback. (Also fixed
    # the "depenpency" typo in the loop variable.)
    themes_assets_root = os.path.expanduser("~") + "/.local/share/VenC/themes_assets/"
    for dependency in theme_assets_dependencies:
        try:
            shutil.copyfile(themes_assets_root + dependency, "blog/" + dependency)
        except IsADirectoryError:
            shutil.copytree(themes_assets_root + dependency, "blog/" + dependency)
        except FileNotFoundError as e:
            # Best effort: report the missing dependency and keep exporting.
            notify(messages.file_not_found.format(e.filename), color="YELLOW")

    notify(
        messages.task_done_in_n_seconds.format(
            round(time.time() - start_timestamp, 6)))
def get_blog_configuration():
    """Load blog_configuration.yaml from the current working directory and
    validate it.

    Verifies every mandatory top-level and "path" field, the markup
    language, defaults the optional "https://schema.org" mapping, normalizes
    "sort_by" and strips a trailing '/' from "blog_url". Calls exit() after
    reporting all validation errors; die()s when the file is missing,
    unreadable or malformed.

    Returns the configuration dict on success.
    """
    try:
        # with-statement closes the handle (the original bare open() leaked
        # it); an explicit Loader avoids PyYAML's deprecated/unsafe bare
        # load() — the rest of this file already uses FullLoader.
        with open(os.getcwd() + "/blog_configuration.yaml", 'r') as f:
            blog_configuration = yaml.load(f.read(), Loader=yaml.FullLoader)

        mandatory_fields = [
            "blog_name", "text_editor", "date_format", "author_name",
            "blog_description", "blog_keywords", "author_description",
            "license", "blog_url", "ftp_host", "blog_language",
            "author_email", "entries_per_pages", "columns", "feed_lenght",
            "reverse_thread_order", "markup_language", "disable_threads",
            "disable_main_thread", "disable_archives", "disable_categories",
            "disable_single_entries", "path_encoding",
            "code_highlight_css_override", "server_port",
            "disable_rss_feed", "disable_atom_feed", "sort_by",
            "enable_jsonld", "enable_jsonp"
        ]

        # Report every missing field before bailing out, not just the first.
        everything_is_okay = True
        for field in mandatory_fields:
            if not field in blog_configuration.keys():
                everything_is_okay = False
                notify(
                    messages.missing_mandatory_field_in_blog_conf.format(
                        field), "RED")

        mandatory_fields = [
            "index_file_name", "category_directory_name",
            "dates_directory_name", "entry_file_name", "rss_file_name",
            "atom_file_name", "ftp", "entries_sub_folders",
            "categories_sub_folders", "dates_sub_folders"
        ]
        for field in mandatory_fields:
            if not field in blog_configuration["path"].keys():
                everything_is_okay = False
                notify(
                    messages.missing_mandatory_field_in_blog_conf.format(
                        field), "RED")

        # Optional JSON-LD extra data defaults to an empty mapping.
        if not "https://schema.org" in blog_configuration.keys():
            blog_configuration["https://schema.org"] = {}

        if not blog_configuration["markup_language"] in [
                "none", "Markdown", "reStructuredText"
        ]:
            everything_is_okay = False
            notify(
                messages.unknown_markup_language.format(
                    blog_configuration["markup_language"],
                    "blog_configuration.yaml"), "RED")

        if (not "sort_by" in blog_configuration.keys()
            ) or blog_configuration["sort_by"] in ['', None]:
            blog_configuration["sort_by"] = "id"

        # Normalize: drop a single trailing slash from the blog URL.
        if blog_configuration["blog_url"][-1:] == '/':
            blog_configuration["blog_url"] = blog_configuration[
                "blog_url"][:-1]

        if not everything_is_okay:
            exit()

        return blog_configuration

    except FileNotFoundError:
        die(messages.no_blog_configuration)
    except PermissionError:
        die(messages.no_blog_configuration)
    except yaml.scanner.ScannerError:
        die(messages.possible_malformed_blogC_configuration)
def do(self, entries, export_path, relative_origin, indentation_level, tree_special_char):
    """Render this thread: announce it on the tree-style progress output,
    record where and relative to what the pages are written, organize the
    entries, then delegate the actual generation to the parent class."""
    label = getattr(messages, "generating_" + self.content_type)
    notify("{0}{1}─ {2}".format(indentation_level, tree_special_char, label))
    self.export_path = export_path
    self.relative_origin = relative_origin
    self.organize_entries(entries)
    super().do()
def __init__(self, filename, paths, jsonld_callback, date_format, encoding="utf-8", ):
    """Parse one entry file from "entries/" into a fully populated Entry.

    filename       -- entry file name, formatted "<id>__<m-d-y-H-M>__<slug>";
                      id and date are decoded from it below.
    paths          -- the "path" section of the blog configuration (dict).
    jsonld_callback-- callable fed with self once built, or None.
    date_format    -- strftime format for self.formatted_date.
    encoding       -- path encoding; '' selects the unidecode/quirk fallback.

    die()s on malformed content; the "\\x1a" prefix in paths is a placeholder
    later replaced by the relative origin.
    """
    self.previous_entry = None
    self.next_entry = None
    self.chapter = None
    self.schemadotorg = {}

    # Loading: split the file into YAML header / preview / content on the
    # two VenC separator lines.
    raw_data = open(os.getcwd()+"/entries/"+filename,'r').read()
    entry_parted = raw_data.split("---VENC-BEGIN-PREVIEW---\n")
    if len(entry_parted) == 2:
        entry_parted = [entry_parted[0]] + entry_parted[1].split("---VENC-END-PREVIEW---\n")
        if len(entry_parted) == 3:
            try:
                self.preview = ProcessedString(entry_parted[1], filename)
                self.content = ProcessedString(entry_parted[2], filename)
            except IllegalUseOfEscape:
                die(messages.illegal_use_of_escape.format(filename))
            try:
                metadata = yaml.load(entry_parted[0], Loader=yaml.FullLoader)
            except yaml.scanner.ScannerError as e:
                die(messages.possible_malformed_entry.format(filename, ''), extra=str(e))
        else:
            cause = messages.missing_separator_in_entry.format("---VENC-END-PREVIEW---")
            die(messages.possible_malformed_entry.format(filename, cause))
    else:
        cause = messages.missing_separator_in_entry.format("---VENC-BEGIN-PREVIEW---")
        die(messages.possible_malformed_entry.format(filename, cause))

    # Setting up optional metadata: any non-reserved key becomes an
    # attribute; the "https://schema.org" mapping is stored separately.
    for key in metadata.keys():
        if not key in ["authors", "tags", "categories", "title"]:
            if metadata[key] != None:
                if key == "https://schema.org":
                    self.schemadotorg = metadata[key]
                else:
                    setattr(self, key, metadata[key])
            else:
                notify(messages.invalid_or_missing_metadata.format(key, filename), color="YELLOW")
                setattr(self, key, '')

    # Fix missing or incorrect mandatory metadata (warn, default to '').
    for key in ["authors", "tags", "categories", "title"]:
        if key not in metadata.keys() or metadata[key] == None:
            notify(messages.invalid_or_missing_metadata.format(key, filename), color="YELLOW")
            metadata[key] = ''

    self.raw_metadata = metadata
    self.filename = filename
    # Filename encodes id and date: "<id>__<month-day-year-hour-minute>__...".
    self.id = int(filename.split('__')[0])
    raw_date = filename.split('__')[1].split('-')
    self.date = datetime.datetime(
        year=int(raw_date[2]),
        month=int(raw_date[0]),
        day=int(raw_date[1]),
        hour=int(raw_date[3]),
        minute=int(raw_date[4])
    )
    self.formatted_date = self.date.strftime(date_format)

    try:
        # Strip a self-referencing pattern that would recurse at render time.
        self.title = metadata["title"].replace(".:GetEntryTitle:.",'')
    except KeyError:
        die(messages.missing_mandatory_field_in_entry.format("title", self.id))

    # authors/tags may be given as a comma-separated string or a YAML list.
    try:
        self.authors = [ e.strip() for e in metadata["authors"].split(",")] if type(metadata["authors"]) == str else metadata["authors"]
        if type(self.authors) != list:
            raise GenericMessage(messages.entry_metadata_is_not_a_list.format("authors", self.id))
    except KeyError:
        die(messages.missing_mandatory_field_in_entry.format("authors", self.id))

    try:
        self.tags = [ e.strip() for e in metadata["tags"].split(",")] if type(metadata["tags"]) == str else metadata["tags"]
        if type(self.tags) != list:
            raise GenericMessage(messages.entry_metadata_is_not_a_list.format("tags", self.id))
    except KeyError:
        die(messages.missing_mandatory_field_in_entry.format("tags", self.id))

    params = {
        "entry_id": self.id,
        "entry_title": self.title
    }

    # TODO MAY BE OPTIMIZED
    # Build sub_folder and url; '' encoding selects the ASCII-transliterated
    # quirk path, otherwise percent-encode with the configured encoding.
    sf = paths["entries_sub_folders"].format(**params)
    if encoding == '':
        self.sub_folder = quirk_encoding(unidecode.unidecode(sf))+'/' if sf != '' else ''
        self.url = "\x1a"+self.sub_folder
        if self.sub_folder == '' or paths["entry_file_name"] != "index.html":
            self.url += quirk_encoding(
                unidecode.unidecode(
                    paths["entry_file_name"].format(**params)
                )
            )
    else:
        try:
            self.sub_folder = urllib.parse.quote(sf, encoding=encoding)+'/' if sf != '' else ''
            self.url = "\x1a"+self.sub_folder
            if self.sub_folder == '' or paths["entry_file_name"] != "index.html":
                self.url += urllib.parse.quote(paths["entry_file_name"].format(**params), encoding=encoding)
        except UnicodeEncodeError as e:
            # Fall back to the raw (unquoted) path and warn.
            self.url = "\x1a"+self.sub_folder+paths["entry_file_name"].format(**params)
            notify("\"{0}\": ".format(sf+paths["entry_file_name"].format(**params))+str(e), color="YELLOW")

    # Categories: "A > B > C" branches; the leaf is the last element, its
    # path accumulates every ancestor as a directory.
    self.categories_leaves = list()
    self.raw_categories = [ c.strip() for c in metadata["categories"].split(',')]
    try:
        for category in self.raw_categories:
            category_leaf = category.split(' > ')[-1].strip()
            if len(category_leaf) != 0:
                category_leaf_path = "\x1a"
                for sub_category in category.split(' > '):
                    category_leaf_path +=sub_category.strip()+'/'
                self.categories_leaves.append({
                    "value": category_leaf,
                    "path": category_leaf_path,
                    "branch" : category
                })
    except IndexError :
        # when list is empty
        pass

    self.categories_tree = []
    build_categories_tree(-1, self.raw_categories, self.categories_tree, None, -1, encoding=encoding, sub_folders=paths["categories_sub_folders"])
    # Per-pattern HTML caches, filled lazily at render time.
    self.html_categories_tree = {}
    self.html_tags = {}
    self.html_authors = {}
    self.html_categories_leaves = {}
    self.html_for_metadata = {}
    if jsonld_callback != None:
        jsonld_callback(self)
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# VenC is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with VenC. If not, see <http://www.gnu.org/licenses/>.

import locale

from venc2.prompt import notify

# Detect the UI language from the process locale and expose a localized
# Messages instance; fall back to English when detection fails.
try:
    current_locale = locale.getlocale()[0].split('_')[0]
    locale_err = False
# BUG FIX: locale.getlocale() returns (None, None) when the locale is unset
# (e.g. the C/POSIX locale), so [0].split(...) raised an AttributeError that
# the original `except locale.Error` did not catch. Treat it like any other
# locale failure and fall back to English.
except (locale.Error, AttributeError) as e:
    notify(e.args, color="YELLOW")
    current_locale = 'en'
    locale_err = True

if current_locale == 'fr':
    from venc2.l10n import fr as language

else:
    from venc2.l10n import en as language

messages = language.Messages()
def __init__(self):
    """Build the blog-wide datastore: load the configuration, parse every
    entry, then derive the archives, categories tree, chapters index and
    JSON-LD documents the rendering threads consume.

    die()s/exit()s through the helpers on malformed input; the "\\x1a"
    prefix in paths is a placeholder later replaced by the relative origin.
    """
    notify("┌─ " + messages.loading_data)
    self.root_page = None
    self.blog_configuration = get_blog_configuration()
    self.sort_by = self.blog_configuration["sort_by"]
    self.enable_jsonld = self.blog_configuration["enable_jsonld"]
    self.enable_jsonp = self.blog_configuration["enable_jsonp"]
    self.blog_url = self.blog_configuration["blog_url"]
    self.path_encoding = self.blog_configuration["path_encoding"]
    # Thread names listed in the configuration as a comma-separated string.
    self.disable_threads = [
        thread_name.strip() for thread_name in
        self.blog_configuration["disable_threads"].split(',')
    ]
    self.entries = list()
    self.entries_per_archives = list()
    self.entries_per_categories = list()
    # One requested-entry slot per CPU worker; single slot when the CPU
    # count cannot be determined.
    try:
        self.cpu_threads_requested_entry = [None] * cpu_count()
    except NotImplementedError:
        self.cpu_threads_requested_entry = [None]

    self.max_category_weight = 1
    self.categories_leaves = []
    self.embed_providers = {}
    self.html_categories_tree = {}
    self.html_categories_leaves = {}
    self.html_blog_archives = {}
    self.cache_get_entry_attribute_by_id = {}
    self.cache_get_chapter_attribute_by_index = {}
    self.generation_timestamp = datetime.datetime.now()
    self.raw_chapters = {}
    self.chapters_index = []
    self.html_chapters = {}

    # Build JSON-LD doc if any
    if self.enable_jsonld or self.enable_jsonp:
        if "https://schema.org" in self.blog_configuration.keys():
            self.optionals_schemadotorg = self.blog_configuration[
                "https://schema.org"]
        else:
            self.optionals_schemadotorg = {}

        self.entries_as_jsonld = {}
        self.archives_as_jsonld = {}
        self.categories_as_jsonld = {}
        self.root_site_to_jsonld()

    # Build entries
    try:
        jsonld_callback = self.entry_to_jsonld_callback if (
            self.enable_jsonld or self.enable_jsonp) else None
        for filename in yield_entries_content():
            self.entries.append(
                Entry(
                    filename,
                    self.blog_configuration["path"],
                    jsonld_callback,
                    self.blog_configuration["path"]
                    ["archives_directory_name"],
                    self.path_encoding))

    # Might happen during Entry creation.
    except MalformedPatterns as e:
        from venc2.helpers import handle_malformed_patterns
        handle_malformed_patterns(e)

    self.entries = sorted(self.entries, key=lambda entry: self.sort(entry))

    path_categories_sub_folders = self.blog_configuration["path"][
        "categories_sub_folders"] + '/'
    path_archives_directory_name = self.blog_configuration["path"][
        "archives_directory_name"]
    # Single pass: link prev/next entries and accumulate per-date and
    # per-category indexes.
    for entry_index in range(0, len(self.entries)):
        current_entry = self.entries[entry_index]
        if entry_index > 0:
            self.entries[entry_index - 1].next_entry = current_entry
            current_entry.previous_entry = self.entries[entry_index - 1]

        # Update entriesPerDates
        if path_archives_directory_name != '':
            formatted_date = current_entry.formatted_date
            entries_index = self.get_entries_index_for_given_date(
                formatted_date)
            if entries_index != None:
                self.entries_per_archives[entries_index].count += 1
                self.entries_per_archives[entries_index].related_to.append(
                    entry_index)
            else:
                self.entries_per_archives.append(
                    MetadataNode(formatted_date, entry_index))

        # Update entriesPerCategories
        try:
            if self.path_encoding == '':
                sub_folders = quirk_encoding(
                    unidecode.unidecode(path_categories_sub_folders))
            else:
                sub_folders = urllib_parse_quote(
                    path_categories_sub_folders,
                    encoding=self.path_encoding)
        except UnicodeEncodeError as e:
            notify("\"{0}\": ".format(path_categories_sub_folders) + str(e),
                   color="YELLOW")

        sub_folders = sub_folders if sub_folders != '/' else ''
        build_categories_tree(entry_index,
                              current_entry.raw_categories,
                              self.entries_per_categories,
                              self.categories_leaves,
                              self.max_category_weight,
                              self.set_max_category_weight,
                              encoding=self.path_encoding,
                              sub_folders=sub_folders)
        self.update_chapters(current_entry)

    # build chapters index
    path_chapters_sub_folders = self.blog_configuration["path"][
        "chapters_sub_folders"]
    path_chapter_folder_name = self.blog_configuration["path"][
        "chapter_directory_name"]
    #TODO: Might be not safe, must test level if is actually an int. Test as well the whole sequence.
    # Chapters sort numerically on their dotted index ("1.2.3" -> 123); each
    # missing intermediate level gets an empty placeholder Chapter.
    for chapter in sorted(self.raw_chapters.keys(),
                          key=lambda x: int(x.replace('.', ''))):
        top = self.chapters_index
        index = ''
        levels = [
            str(level) for level in chapter.split('.') if level != ''
        ]
        len_levels = len(levels)
        for i in range(0, len_levels):
            l = levels[i]
            if index == '':
                index = l
            else:
                index += '.' + l

            f = filter(lambda c: c.index == index, top)
            try:
                top = next(f).sub_chapters
            except StopIteration:
                if index in self.raw_chapters.keys():
                    # TODO: Replace this shitty bloc by a function call building path
                    try:
                        path = "\x1a" + (
                            (path_chapters_sub_folders + '/'
                             if path_chapters_sub_folders != '' else '') +
                            path_chapter_folder_name).format(
                                **{
                                    "chapter_name":
                                    self.raw_chapters[index].title,
                                    "chapter_index": index
                                })
                        try:
                            if self.path_encoding == '':
                                path = quirk_encoding(
                                    unidecode.unidecode(path))
                            else:
                                path = urllib_parse_quote(
                                    path, encoding=self.path_encoding)
                        except UnicodeEncodeError as e:
                            notify("\"{0}\": ".format(
                                path_chapters_sub_folders) + str(e),
                                   color="YELLOW")
                    except KeyError as e:
                        from venc2.helpers import die
                        die(messages.variable_error_in_filename.format(e))
                    top.append(
                        Chapter(index, self.raw_chapters[index], path))
                    self.raw_chapters[index].chapter = top[-1]
                else:
                    top.append(Chapter(index, None, ''))
                top = top[-1].sub_chapters

    # Setup BlogArchives Data
    self.blog_archives = list()
    path_archives_sub_folders = self.blog_configuration["path"][
        "archives_sub_folders"] + '/'
    for node in self.entries_per_archives:
        try:
            if self.path_encoding == '':
                sub_folders = quirk_encoding(
                    unidecode.unidecode(path_archives_sub_folders))
            else:
                sub_folders = urllib_parse_quote(
                    path_archives_sub_folders, encoding=self.path_encoding)
        except UnicodeEncodeError as e:
            notify("\"{0}\": ".format(path_archives_sub_folders) + str(e),
                   color="YELLOW")

        sub_folders = sub_folders if sub_folders != '/' else ''
        self.blog_archives.append({
            "value": node.value,
            "path": "\x1a" + sub_folders + node.value,
            "count": node.count,
            "weight": node.weight
        })
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# VenC is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with VenC. If not, see <http://www.gnu.org/licenses/>.

import locale

from venc2.prompt import notify

# Detect the UI language from the process locale and expose a localized
# Messages instance; fall back to English when detection fails.
try:
    current_locale = locale.getlocale()[0].split('_')[0]
    locale_err = False
# BUG FIX: locale.getlocale() returns (None, None) when the locale is unset
# (e.g. the C/POSIX locale), so [0].split(...) raised an AttributeError that
# the original `except locale.Error` did not catch. Treat it like any other
# locale failure and fall back to English.
except (locale.Error, AttributeError) as e:
    notify(e.args, color="YELLOW")
    current_locale = 'en'
    locale_err = True

if current_locale == 'fr':
    from venc2.l10n import fr as language

else:
    from venc2.l10n import en as language

messages = language.Messages()
def new_blog(argv):
    """Create one skeleton blog per folder name in argv.

    Each folder gets the standard sub-directories (blog, entries, theme,
    includes, extra, templates) and a default blog_configuration.yaml.
    die()s when argv is empty or a target folder already exists.
    """
    if len(argv) < 1:
        # BUG FIX: was `die(Messages.missingParams...)` — `Messages` /
        # `missingParams` are undefined (NameError); the module-level
        # `messages` instance with snake_case attributes is used everywhere
        # else in this codebase.
        die(messages.missing_params.format("--new-blog"))

    default_configuration = {
        "blog_name": messages.blog_name,
        "disable_threads": "",
        "disable_archives": False,
        "disable_categories": False,
        "disable_single_entries": False,
        "disable_main_thread": False,
        "disable_rss_feed": False,
        "disable_atom_feed": False,
        "text_editor": "nano",
        "date_format": "%A %d. %B %Y",
        "author_name": messages.your_name,
        "blog_description": messages.blog_description,
        "blog_keywords": messages.blog_keywords,
        "author_description": messages.about_you,
        "license": messages.license,
        "blog_url": messages.blog_url,
        "ftp_host": messages.ftp_host,
        "blog_language": messages.blog_language,
        "author_email": messages.your_email,
        "code_highlight_css_override": False,
        "path": {
            "ftp": messages.ftp_path,
            "entries_sub_folders": "",
            "categories_sub_folders": "",
            "dates_sub_folders": "",
            "index_file_name": "index{page_number}.html",
            "category_directory_name": "{category}",
            "dates_directory_name": "%Y-%m",
            "entry_file_name": "entry{entry_id}.html",
            "rss_file_name": "rss.xml",
            "atom_file_name": "atom.xml"
        },
        "entries_per_pages": 10,
        "columns": 1,
        # NOTE: "feed_lenght" [sic] is the key the rest of the codebase
        # reads; do not fix the spelling here alone.
        "feed_lenght": 5,
        "reverse_thread_order": True,
        "markup_language": "Markdown",
        "path_encoding": "utf-8",
        "server_port": 8888,
        "sort_by": "id",
        "enable_jsonld": True,
        "enable_jsonp": True
    }

    for folder_name in argv:
        try:
            os.mkdir(folder_name)
        except OSError:
            die(messages.file_already_exists.format("--new-blog", os.getcwd()+'/'+folder_name))

        for sub_folder in ("blog", "entries", "theme", "includes", "extra", "templates"):
            os.mkdir(folder_name+'/'+sub_folder)

        # with-statement closes the stream (the original leaked the handle).
        with codecs.open(folder_name+'/'+'blog_configuration.yaml', 'w', encoding="utf-8") as stream:
            yaml.dump(default_configuration, stream, default_flow_style=False, allow_unicode=True)

    notify(messages.blog_created)
def __init__(self, filename, paths, jsonld_callback, date_format, encoding="utf-8", ):
    """Parse one entry file from "entries/" into a fully populated Entry.

    filename       -- entry file name, formatted "<id>__<m-d-y-H-M>__<slug>".
    paths          -- the "path" section of the blog configuration (dict).
    jsonld_callback-- callable fed with self once built, or None.
    date_format    -- strftime format for self.formatted_date.
    encoding       -- encoding used to percent-quote the generated paths.

    die()s on malformed content.
    """
    self.previous_entry = None
    self.next_entry = None
    self.schemadotorg = {}

    # Loading: split the file into YAML header / preview / content on the
    # two VenC separator lines.
    raw_data = open(os.getcwd()+"/entries/"+filename,'r').read()
    entry_parted = raw_data.split("---VENC-BEGIN-PREVIEW---\n")
    if len(entry_parted) == 2:
        entry_parted = [entry_parted[0]] + entry_parted[1].split("---VENC-END-PREVIEW---\n")
        if len(entry_parted) == 3:
            self.preview = ProcessedString(entry_parted[1], filename)
            self.content = ProcessedString(entry_parted[2], filename)
            try:
                # Explicit loader: bare yaml.load() is deprecated/unsafe;
                # the newer copy of this class already uses FullLoader.
                metadata = yaml.load(entry_parted[0], Loader=yaml.FullLoader)
            except yaml.scanner.ScannerError as e:
                die(messages.possible_malformed_entry.format(filename, ''), extra=str(e))
        else:
            cause = messages.missing_separator_in_entry.format("---VENC-END-PREVIEW---")
            die(messages.possible_malformed_entry.format(filename, cause))
    else:
        cause = messages.missing_separator_in_entry.format("---VENC-BEGIN-PREVIEW---")
        die(messages.possible_malformed_entry.format(filename, cause))

    # Setting up optional metadata.
    # BUG FIX: the "https://schema.org" test used to live in an `elif` that
    # also required metadata[key] != None — a condition the first branch had
    # already excluded, so self.schemadotorg could never be populated. The
    # check is now nested inside the non-None branch, matching the newer
    # revision of this class.
    for key in metadata.keys():
        if not key in ["authors", "tags", "categories", "title"]:
            if metadata[key] != None:
                if key == "https://schema.org":
                    self.schemadotorg = metadata[key]
                else:
                    setattr(self, key, metadata[key])
            else:
                notify(messages.invalid_or_missing_metadata.format(key, filename), color="YELLOW")
                setattr(self, key, '')

    # Fix missing or incorrect mandatory metadata (warn, default to '').
    for key in ["authors", "tags", "categories", "title"]:
        if key not in metadata.keys() or metadata[key] == None:
            notify(messages.invalid_or_missing_metadata.format(key, filename), color="YELLOW")
            metadata[key] = ''

    self.raw_metadata = metadata
    self.filename = filename
    # Filename encodes id and date: "<id>__<month-day-year-hour-minute>__...".
    self.id = int(filename.split('__')[0])
    raw_date = filename.split('__')[1].split('-')
    self.date = datetime.datetime(
        year=int(raw_date[2]),
        month=int(raw_date[0]),
        day=int(raw_date[1]),
        hour=int(raw_date[3]),
        minute=int(raw_date[4])
    )
    self.formatted_date = self.date.strftime(date_format)

    try:
        # Strip a self-referencing pattern that would recurse at render time.
        self.title = metadata["title"].replace(".:GetEntryTitle:.",'')
    except KeyError:
        die(messages.missing_mandatory_field_in_entry.format("title", self.id))

    try:
        self.authors = [ {"value":e} for e in list(metadata["authors"].split(",") if metadata["authors"] != str() else list()) ]
    except KeyError:
        die(messages.missing_mandatory_field_in_entry.format("authors", self.id))

    try:
        self.tags = [ {"value":e} for e in list(metadata["tags"].split(",") if metadata["tags"] != str() else list())]
    except KeyError:
        die(messages.missing_mandatory_field_in_entry.format("tags", self.id))

    params = {
        "entry_id": self.id,
        "entry_title": self.title
    }

    sf = paths["entries_sub_folders"].format(**params).replace(' ','-')
    self.sub_folder = urllib.parse.quote(sf, encoding=encoding)+'/' if sf != '' else ''
    try:
        self.url = ".:GetRelativeOrigin:."+self.sub_folder+urllib.parse.quote(paths["entry_file_name"].format(**params), encoding=encoding)
    except UnicodeEncodeError as e:
        # Fall back to the raw (unquoted) file name and warn.
        self.url = ".:GetRelativeOrigin:."+self.sub_folder+paths["entry_file_name"].format(**params)
        notify("\"{0}\": ".format(sf+paths["entry_file_name"].format(**params))+str(e), color="YELLOW")

    # Categories: "A > B > C" branches; the leaf is the last element, its
    # path accumulates every ancestor as a directory.
    self.categories_leaves = list()
    self.raw_categories = [ c.strip() for c in metadata["categories"].split(',')]
    try:
        for category in self.raw_categories:
            category_leaf = category.split(' > ')[-1].strip()
            if len(category_leaf) != 0:
                category_leaf_path = ".:GetRelativeOrigin:."
                for sub_category in category.split(' > '):
                    category_leaf_path += sub_category.strip()+'/'
                self.categories_leaves.append({
                    "value": category_leaf,
                    "path": category_leaf_path,
                    "raw" : category
                })
    except IndexError :
        # when list is empty
        pass

    self.categories_tree = []
    build_categories_tree(-1, self.raw_categories, self.categories_tree, None, -1, encoding=encoding, sub_folders=paths["categories_sub_folders"])
    # Per-pattern HTML caches, filled lazily at render time.
    self.html_categories_tree = {}
    self.html_tags = {}
    self.html_authors = {}
    self.html_categories_leaves = {}
    if jsonld_callback != None:
        jsonld_callback(self)
# VenC is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with VenC. If not, see <http://www.gnu.org/licenses/>. import locale from venc2.prompt import notify try: current_locale = '.'.join(locale.getlocale()) if current_locale == None: notify( "Your system locale seems to be undefined, VenC fallback to default.", color="YELLOW") current_locale = 'en' locale.setlocale(locale.LC_ALL, current_locale) locale_err = False current_locale = current_locale.split('_')[0] except locale.Error as e: notify(e.args, color="YELLOW") current_locale = 'en' locale_err = True if current_locale == 'fr': from venc2.l10n import fr as language
def __init__(self):
    """Build the blog-wide datastore: load the configuration, parse every
    entry, then derive the per-date index, categories tree, chapters index
    and JSON-LD documents the rendering threads consume.

    The ".:GetRelativeOrigin:." prefix in built paths is a placeholder
    later replaced by the page's relative origin.
    """
    notify("┌─ " + messages.loading_data)
    self.blog_configuration = get_blog_configuration()
    self.sort_by = self.blog_configuration["sort_by"]
    self.enable_jsonld = self.blog_configuration["enable_jsonld"]
    self.enable_jsonp = self.blog_configuration["enable_jsonp"]
    self.blog_url = self.blog_configuration["blog_url"]
    # Thread names listed in the configuration as a comma-separated string.
    self.disable_threads = [
        thread_name.strip() for thread_name in
        self.blog_configuration["disable_threads"].split(',')
    ]
    self.entries = list()
    self.entries_per_dates = list()
    self.entries_per_categories = list()
    self.requested_entry_index = 0
    self.max_category_weight = 1
    self.categories_leaves = []
    self.embed_providers = {}
    self.html_categories_tree = {}
    self.html_categories_leaves = {}
    self.html_blog_dates = {}
    self.generation_timestamp = datetime.datetime.now()
    self.raw_chapters = {}
    self.chapters_index = []
    self.html_chapters = {}

    # Build JSON-LD doc if any
    if self.enable_jsonld or self.enable_jsonp:
        if "https://schema.org" in self.blog_configuration.keys():
            self.optionals_schemadotorg = self.blog_configuration[
                "https://schema.org"]
        else:
            self.optionals_schemadotorg = {}

        self.entries_as_jsonld = {}
        self.archives_as_jsonld = {}
        self.categories_as_jsonld = {}
        self.root_site_to_jsonld()

    # Build entries
    try:
        jsonld_callback = self.entry_to_jsonld_callback if (
            self.enable_jsonld or self.enable_jsonp) else None
        for filename in yield_entries_content():
            self.entries.append(
                Entry(
                    filename,
                    self.blog_configuration["path"],
                    jsonld_callback,
                    self.blog_configuration["path"]
                    ["dates_directory_name"],
                    self.blog_configuration["path_encoding"]))

    # Might happen during Entry creation.
    except MalformedPatterns as e:
        from venc2.helpers import handle_malformed_patterns
        handle_malformed_patterns(e)

    self.entries = sorted(self.entries, key=lambda entry: self.sort(entry))

    # Single pass: link prev/next entries and accumulate per-date and
    # per-category indexes.
    for entry_index in range(0, len(self.entries)):
        current_entry = self.entries[entry_index]
        if entry_index > 0:
            self.entries[entry_index - 1].next_entry = current_entry
            current_entry.previous_entry = self.entries[entry_index - 1]

        # Update entriesPerDates
        if self.blog_configuration["path"]["dates_directory_name"] != '':
            formatted_date = current_entry.formatted_date
            entries_index = self.get_entries_index_for_given_date(
                formatted_date)
            if entries_index != None:
                self.entries_per_dates[entries_index].count += 1
                self.entries_per_dates[entries_index].related_to.append(
                    entry_index)
            else:
                self.entries_per_dates.append(
                    MetadataNode(formatted_date, entry_index))

        # Update entriesPerCategories
        try:
            sub_folders = urllib.parse.quote(
                self.blog_configuration["path"]["categories_sub_folders"]
                + '/',
                encoding=self.blog_configuration["path_encoding"])
        except UnicodeEncodeError as e:
            # Fall back to the unquoted path and warn.
            sub_folders = self.blog_configuration["path"][
                "categories_sub_folders"] + '/'
            notify("\"{0}\": ".format(sub_folders) + str(e), color="YELLOW")

        sub_folders = sub_folders if sub_folders != '/' else ''
        build_categories_tree(
            entry_index,
            current_entry.raw_categories,
            self.entries_per_categories,
            self.categories_leaves,
            self.max_category_weight,
            self.set_max_category_weight,
            encoding=self.blog_configuration["path_encoding"],
            sub_folders=sub_folders)
        self.update_chapters(current_entry)

    # build chapters index
    # Walk each dotted chapter index ("1.2.3") level by level; missing
    # intermediate levels get an empty placeholder Chapter.
    for chapter in sorted(self.raw_chapters.keys()):
        top = self.chapters_index
        index = ''
        levels = [
            str(level) for level in chapter.split('.') if str(level) != ''
        ]
        len_levels = len(levels)
        for i in range(0, len_levels):
            l = levels[i]
            if index == '':
                index = l
            else:
                index += '.' + l

            f = filter(lambda c: c.index == index, top)
            try:
                top = next(f).sub_chapters
            except StopIteration:
                try:
                    top.append(
                        Chapter(index, self.raw_chapters[index][1],
                                self.raw_chapters[index][2]))
                except KeyError:
                    top.append(Chapter(index, '', ''))
                top = top[-1].sub_chapters

    # Setup BlogArchives Data
    self.blog_dates = list()
    for node in self.entries_per_dates:
        try:
            sub_folders = urllib.parse.quote(
                self.blog_configuration["path"]["dates_sub_folders"] + '/',
                encoding=self.blog_configuration["path_encoding"])
        except UnicodeEncodeError as e:
            # Fall back to the unquoted path and warn.
            sub_folders = self.blog_configuration["path"][
                "dates_sub_folders"] + '/'
            notify("\"{0}\": ".format(sub_folders) + str(e), color="YELLOW")

        sub_folders = sub_folders if sub_folders != '/' else ''
        self.blog_dates.append({
            "value": node.value,
            "path": ".:GetRelativeOrigin:." + sub_folders + node.value,
            "count": node.count,
            "weight": node.weight
        })
def new_blog(argv):
    """Create one skeleton blog per folder name in argv.

    Each folder gets the standard sub-directories (blog, entries, theme,
    includes, extra, templates) and a default blog_configuration.yaml.
    die()s when argv is empty or a target folder already exists.
    """
    if len(argv) < 1:
        # BUG FIX: was `die(Messages.missingParams...)` — `Messages` /
        # `missingParams` are undefined (NameError); the module-level
        # `messages` instance with snake_case attributes is used everywhere
        # else in this codebase.
        die(messages.missing_params.format("--new-blog"))

    default_configuration = {
        "blog_name": messages.blog_name,
        "disable_threads": "",
        "disable_archives": False,
        "disable_categories": False,
        "disable_single_entries": False,
        "disable_main_thread": False,
        "disable_rss_feed": False,
        "disable_atom_feed": False,
        "text_editor": "nano",
        "date_format": "%A %d. %B %Y",
        "author_name": messages.your_name,
        "blog_description": messages.blog_description,
        "blog_keywords": messages.blog_keywords,
        "author_description": messages.about_you,
        "license": messages.license,
        "blog_url": messages.blog_url,
        "ftp_host": messages.ftp_host,
        "blog_language": messages.blog_language,
        "author_email": messages.your_email,
        "code_highlight_css_override": False,
        "path": {
            "ftp": messages.ftp_path,
            "entries_sub_folders": "",
            "categories_sub_folders": "",
            "dates_sub_folders": "",
            "index_file_name": "index{page_number}.html",
            "category_directory_name": "{category}",
            "dates_directory_name": "%Y-%m",
            "entry_file_name": "entry{entry_id}.html",
            "rss_file_name": "rss.xml",
            "atom_file_name": "atom.xml"
        },
        "entries_per_pages": 10,
        "columns": 1,
        # NOTE: "feed_lenght" [sic] is the key the rest of the codebase
        # reads; do not fix the spelling here alone.
        "feed_lenght": 5,
        "reverse_thread_order": True,
        "markup_language": "Markdown",
        "path_encoding": "utf-8",
        "server_port": 8888,
        "sort_by": "id",
        "enable_jsonld": False,
        "enable_jsonp": False
    }

    for folder_name in argv:
        try:
            os.mkdir(folder_name)
        except OSError:
            die(messages.file_already_exists.format("--new-blog", os.getcwd()+'/'+folder_name))

        for sub_folder in ("blog", "entries", "theme", "includes", "extra", "templates"):
            os.mkdir(folder_name+'/'+sub_folder)

        # with-statement closes the stream (the original leaked the handle).
        with codecs.open(folder_name+'/'+'blog_configuration.yaml', 'w', encoding="utf-8") as stream:
            yaml.dump(default_configuration, stream, default_flow_style=False, allow_unicode=True)

    notify(messages.blog_created)
def get_blog_configuration():
    """Load blog_configuration.yaml from the current working directory and
    validate it.

    Verifies every mandatory top-level and "path" field, the markup
    language, defaults the optional "https://schema.org" mapping, normalizes
    "sort_by" and strips a trailing '/' from "blog_url". Calls exit() after
    reporting all validation errors; die()s when the file is missing,
    unreadable or malformed.

    Returns the configuration dict on success.
    """
    try:
        # with-statement closes the handle (the original bare open() leaked
        # it); an explicit Loader avoids PyYAML's deprecated/unsafe bare
        # load().
        with open(os.getcwd()+"/blog_configuration.yaml", 'r') as f:
            blog_configuration = yaml.load(f.read(), Loader=yaml.FullLoader)

        mandatory_fields = [
            "blog_name", "text_editor", "date_format", "author_name",
            "blog_description", "blog_keywords", "author_description",
            "license", "blog_url", "ftp_host", "blog_language",
            "author_email", "entries_per_pages", "columns", "feed_lenght",
            "reverse_thread_order", "markup_language", "disable_threads",
            "disable_main_thread", "disable_archives", "disable_categories",
            "disable_single_entries", "path_encoding",
            "code_highlight_css_override", "server_port",
            "disable_rss_feed", "disable_atom_feed", "sort_by",
            "enable_jsonld", "enable_jsonp"
        ]

        # Report every missing field before bailing out, not just the first.
        everything_is_okay = True
        for field in mandatory_fields:
            if not field in blog_configuration.keys():
                everything_is_okay = False
                notify(messages.missing_mandatory_field_in_blog_conf.format(field),"RED")

        mandatory_fields = [
            "index_file_name", "category_directory_name",
            "dates_directory_name", "entry_file_name", "rss_file_name",
            "atom_file_name", "ftp", "entries_sub_folders",
            "categories_sub_folders", "dates_sub_folders"
        ]
        for field in mandatory_fields:
            if not field in blog_configuration["path"].keys():
                everything_is_okay = False
                notify(messages.missing_mandatory_field_in_blog_conf.format(field),"RED")

        # Optional JSON-LD extra data defaults to an empty mapping.
        if not "https://schema.org" in blog_configuration.keys():
            blog_configuration["https://schema.org"] = {}

        if not blog_configuration["markup_language"] in ["none", "Markdown", "reStructuredText"]:
            everything_is_okay = False
            notify(messages.unknown_markup_language.format(blog_configuration["markup_language"], "blog_configuration.yaml"),"RED")

        if (not "sort_by" in blog_configuration.keys()) or blog_configuration["sort_by"] in ['', None]:
            blog_configuration["sort_by"] = "id"

        # Normalize: drop a single trailing slash from the blog URL.
        if blog_configuration["blog_url"][-1:] == '/':
            blog_configuration["blog_url"] = blog_configuration["blog_url"][:-1]

        if not everything_is_okay:
            exit()

        return blog_configuration

    except FileNotFoundError:
        die(messages.no_blog_configuration)
    except PermissionError:
        die(messages.no_blog_configuration)
    except yaml.scanner.ScannerError:
        die(messages.possible_malformed_blogC_configuration)