def sanity_check(path): """Assure zip contents adhere to file structure standard. Args: path (str): The path to the zip to perform a sanity check on Notes: Needs to actually return values, not print them! Returns: None """ settings = lib.ini('settings') zip_file = ZipFile(path, 'r') zip_index = zip_file_index(zip_file) zip_file.close() sane_directories = settings['directories'].values() for path in zip_index: error = True path = path.split(os.path.sep, 1)[0] if os.path.basename(path) in sane_directories: continue print 'unallowed basedir: ' + path return None
def recache():
    """Re-run cache parsing over everything already in the cache directory.

    Walks the index of the cache directory, feeds each file through
    parse.parse_cache(), and writes the result back under the cache
    directory, preserving any sub-directory layout.

    What the hell is this even for, again? Isn't this cache_generate()?
    """
    settings = lib.ini('settings')
    cache_dir = settings['directories']['cache']

    for dir_path, names in lib.index(cache_dir).items():
        # drop the leading (cache) component to get the relative sub-path
        sub_parts = dir_path.split(os.path.sep)[1:]
        if sub_parts:
            target_dir = os.path.join(cache_dir, os.path.sep.join(sub_parts))
        else:
            target_dir = cache_dir

        for name in names:
            source_path = os.path.join(dir_path, name)
            parsed = parse.parse_cache(source_path)
            with open(os.path.join(target_dir, name), 'w') as out:
                out.write(parsed)
    return None
def cache():
    """Completely parse all viable files therein the content directory,
    to the cache directory.

    Flushes the existing cache first, then mirrors the content
    directory's layout into the cache directory, writing parsed output
    for each file.  Finishes by running recache() and
    parse.cache_generate().

    Returns:
        None

    """
    flush_cache()
    settings = lib.ini('settings')
    content_dir = settings['directories']['content']
    cache_dir = settings['directories']['cache']
    for working_directory, file_names in lib.index().items():
        # first we remove the base (usually content) directory
        baseless_directory_list = working_directory.split(os.path.sep)[1:]
        baseless_directory = os.path.sep.join(baseless_directory_list)
        # if there are directories in path, then we need to create them
        # NOTE(review): os.mkdir only creates one level; this relies on
        # lib.index() yielding parents before children -- confirm.
        if len(baseless_directory_list) > 0:
            output_directory = os.path.join(cache_dir, baseless_directory)
            os.mkdir(output_directory)
        else:
            output_directory = cache_dir
        for file_name in file_names:
            # join the current file name to the current directory
            # (sans base) to form the relative path, then resolve that
            # against both the content and cache trees
            baseless_file_path = os.path.join(baseless_directory, file_name)
            content_file_path = os.path.join(content_dir, baseless_file_path)
            cache_file_path = os.path.join(output_directory, file_name)
            if file_name in ('_cache', '_generate'):
                # in the future this will be for exceptions for file names
                # and file types we want to skip parsing and just copy over
                with open(content_file_path) as f:
                    cached_contents = f.read()
            else:
                # parse!
                cached_contents = parse.parse(baseless_file_path)
            with open(cache_file_path, 'w') as f:
                try:
                    f.write(cached_contents)
                except TypeError:
                    # surface which file produced non-string contents
                    raise Exception((cache_file_path, cached_contents))
    recache()
    parse.cache_generate()
    return None
def setup(): """Lazy installer; it sets up the directory structure for Sakura. """ settings = lib.ini('settings') for directory_type, directory_path in settings['directories'].items(): try: os.mkdir(directory_path) except OSError: print directory_type + ' already exists'
def include(document):
    """Return the document contents after performing file inclusions.

    Replaces instances of ##inc *.*## with the contents of a plaintext
    file.  For example ##inc foo.txt## would be replaced by the file
    contents of include/foo.txt.

    Args:
        document (dict): document dictionary for the document being
            parsed

    Raises:
        IncludeError: if a referenced include file cannot be opened.

    Returns:
        str: document contents with other file contents included;
            specified in inclusion tags--therein document['contents'].

    """
    settings = lib.ini('settings')
    include_dir = settings['directories']['include']

    # TODO: inclusions within inclusions aren't parsed yet; the
    # iterator should itself run inside a while loop for that.
    for element in document.iter_while('include'):
        include_path = os.path.join(include_dir, element.action)

        # retrieve the file specified in the ##inc## call
        try:
            with open(include_path) as include_file:
                substitution = include_file.read().strip()
        except IOError:
            raise IncludeError(include_path, document.path)

        # Includes are able to reference the attributes from the
        # respective include-octothorpe: ##var title## yields "wag"
        # from ##inc ... title='wag'##
        for attr_name, attr_value in element.items():
            placeholder = (tag.TAG_VARIABLE_LEFT + attr_name
                           + tag.TAG_VARIABLE_RIGHT)
            substitution = substitution.replace(placeholder, attr_value)

        document.replace(element.full, substitution)

    return document.source
def flush_cache():
    """Delete and recreate the cache directory.

    Makes a backup first when settings['backups']['before_cache'] is
    'yes'.  Removal failures (e.g. the cache directory not existing
    yet) are ignored; the directory is always (re)created afterwards.

    Returns:
        None

    """
    # read settings outside the try so a lookup failure can't leave
    # cache_directory unbound at the final os.mkdir
    settings = lib.ini('settings')
    cache_directory = settings['directories']['cache']

    if settings['backups']['before_cache'] == 'yes':
        backup()

    try:
        shutil.rmtree(cache_directory)
    except OSError:
        # shutil.rmtree raises OSError (the original caught IOError,
        # which never matched); best-effort removal is intended here
        pass

    os.mkdir(cache_directory)
def load(public):
    """Returns a dictionary of "functions" (functions) and their
    arguments.

    Load functions to evaluate arguments/values pre-defined in "public"
    (dictionary).  Every .py module (except __init__) in the configured
    function package is imported; its SAKURA_ARGS names are resolved
    against `public`.

    Args:
        public (dict): Keyword arguments any function may select from
            by key.

    Returns:
        dict: key is the func name/module name, and the values are a
            three-element tuple:
            (function, argument [tuple], replace_all [bool])

    """
    settings = lib.ini('settings')
    package = settings['directories']['function']
    functions = {}

    for file_name in glob(package + '/*.py'):
        module_name = os.path.splitext(os.path.basename(file_name))[0]
        if module_name == "__init__":
            continue

        module_import = "%s.%s" % (package, module_name)

        # pull the module's declared configuration...
        config = __import__(module_import,
                            fromlist=('SAKURA_ARGS', 'REPLACE_ALL'))
        replace_all = config.REPLACE_ALL
        # ...and resolve each declared argument name through `public`
        resolved_args = [public[arg_name] for arg_name in config.SAKURA_ARGS]

        module = __import__(module_import, fromlist=[module_name])
        functions[module_name] = (getattr(module, module_name),
                                  resolved_args,
                                  replace_all)

    return functions
def httpd(): """THIS IS A TOY. It is only here so users may test parsed contents before making them public. """ settings = lib.ini('settings') address = settings['httpd']['address'] port = int(settings['httpd']['port']) handler = CGIHTTPServer.CGIHTTPRequestHandler handler.cgi_directories = ['/cgi'] server = ThreadingCGIServer((address, port), handler) try: while 1: sys.stdout.flush() server.handle_request() except KeyboardInterrupt: print "Finished"
def backup():
    """Zip the config and content directories into a backup/date folder.

    Totally a snapshot thing...  Each directory in settings['backups']
    whose value is 'yes' is archived as a zip inside a new directory
    named after the current ISO timestamp.
    """
    settings = lib.ini('settings')
    backup_directory = settings['directories']['backups']
    date_time = datetime.now().isoformat()
    backup_directory = os.path.join(backup_directory, date_time)
    os.mkdir(backup_directory)

    # get the directories to backup, plus a setting
    backup_conf = settings['backups'].copy()
    backup_conf.pop('before_cache')  # not a directory to backup!

    # make specified backups
    pending_backups = [k for k, v in backup_conf.items() if v == 'yes']
    for directory in pending_backups:
        # os.path.join instead of plain concatenation: the latter
        # dropped the separator, producing "<timestamp>content.zip"
        # beside the dated directory instead of inside it
        archive_path = os.path.join(backup_directory, directory)
        shutil.make_archive(archive_path, 'zip', directory)