def _initialize(): """ Initialize the jinja environment. :return: """ global _environment, _output_path, _markdown_conversion, _template_configuration configuration_settings = get_configuration() # Load up and store the configuration that is defined in the template files template_configuration_path = pathlib.Path( configuration_settings.publishing.templates) / 'template.yaml' if template_configuration_path.exists(): with template_configuration_path.open() as tc_file: _template_configuration = yaml.load(tc_file) # Create markdown conversion object _markdown_conversion = markdown.Markdown() # Load the same jinja environment for everyone _environment = Environment(loader=FileSystemLoader( configuration_settings.publishing.templates), autoescape=select_autoescape()) # Load up the custom filters _environment.filters['autology_url'] = url_filter _environment.filters['markdown'] = markdown_filter # Verify that the output directory exists before starting to write out the content _output_path = pathlib.Path(configuration_settings.publishing.output) _output_path.mkdir(exist_ok=True)
def _main(args):
    """Walk the configured log files and publish the processing life-cycle events."""
    configuration_settings = get_configuration()

    topics.Processing.BEGIN.publish()

    previous_date = None
    for entry in log_file.walk_log_files(configuration_settings.processing.inputs):
        entry_date = entry.date.date()

        if previous_date != entry_date:
            # Close out the previous day (if there was one) before starting the new one
            if previous_date:
                topics.Processing.DAY_END.publish(date=previous_date)
            previous_date = entry_date
            topics.Processing.DAY_START.publish(date=previous_date)

        # Send out the notification that the file should be processed
        topics.Processing.PROCESS_FILE.publish(entry=entry)

    # The last day never sees a following date change, so close it out here
    if previous_date:
        topics.Processing.DAY_END.publish(date=previous_date)

    topics.Processing.END.publish()
    topics.Reporting.BUILD_MASTER.publish()
def _initialization(): """Initialize storage engine by querying configuration, and then file system.""" global _repo configuration = get_configuration() if not configuration.git.enabled: return # If creating the git repository fails, then initialize a new repository try: _repo = git.Repo(str(get_configuration_root())) except git.InvalidGitRepositoryError: _repo = git.Repo.init(str(get_configuration_root())) try: # Add the subscriptions for working with local storage topics.Storage.FILE_ADDED.subscribe(_file_added) topics.Storage.FINISHED_MODIFICATIONS.subscribe(_finished_changes) # Test status of remote accessibility if configuration.git.remote: origin_remote = _repo.remote(name=configuration.git.remote) origin_remote.fetch() # And then add subscriptions for handling remotes topics.Storage.PULL_CHANGES.subscribe(_pull_changes) topics.Storage.PUSH_CHANGES.subscribe(_push_changes) except (git.GitCommandError, ValueError): # Command error is raised when the remote is not accessible and attribute error is raised when the remote is not # defined. pass
def translate_path(self, path):
    """Code copied from parent class except the definition of the path variable."""
    # abandon query parameters and fragments
    path = path.partition('?')[0]
    path = path.partition('#')[0]
    # Don't forget explicit trailing slash when normalizing. Issue17324
    had_trailing_slash = path.rstrip().endswith('/')
    try:
        path = urllib.parse.unquote(path, errors='surrogatepass')
    except UnicodeDecodeError:
        path = urllib.parse.unquote(path)
    path = posixpath.normpath(path)

    # Modify the path so that it is fetching files from the output directory, not
    # the root of the project.
    result = str(pathlib.Path.cwd() / get_configuration().publishing.output)
    for component in path.split('/'):
        # Skip empty parts and anything that is not a simple file/directory name
        if not component:
            continue
        if os.path.dirname(component) or component in (os.curdir, os.pardir):
            continue
        result = os.path.join(result, component)

    if had_trailing_slash:
        result += '/'
    return result
def _main(args): """Generate the content for storage.""" main_path = pathlib.Path(args.output_dir) main_path.mkdir(exist_ok=True) template_definition = args.template_definition # template output directory is output/templates, so need to create that location before pulling out the templates template_location = template_utilities.get_template_directory() # Install the template and get the path to the template directory for updating the configuration file. templates_path = template_utilities.install_template(template_location, template_definition) # Now need to find the templates definition of that zip file and locate it in the file system so that it can be settings = get_configuration() # Override the configuration details with the new template path. This should probably be handled by the publishing # plugin, but for now this will work settings.publishing.templates = str(templates_path) configuration_file_path = main_path / 'config.yaml' dump_configuration(configuration_file_path, settings) # Create the initial log directories for directory in settings.processing.inputs: log_directory = main_path / directory log_directory.mkdir(parents=True, exist_ok=True)
def _initialize(): """ Look in the configuration and create a new SimpleReportPlugin for all of the activities that are defined. :return: """ for config in get_configuration().simple.activities: plugin = SimpleReportPlugin(config.id, config.name, config.description) plugin.initialize() _defined_plugins.append(plugin)
def _create_note(template, start_time, end_time):
    """Create a new note and display an editor for that note."""
    post = template.start(start_time=start_time, end_time=end_time)

    note_directory = pathlib.Path(get_configuration().processing.inputs[0])

    # Relative input paths are anchored at the configuration root
    if not note_directory.is_absolute():
        note_directory = get_configuration_root() / note_directory

    # Notes are filed by date: YYYY/MM/DD
    note_directory = (note_directory / start_time.strftime('%Y') /
                      start_time.strftime('%m') / start_time.strftime('%d'))
    note_directory.mkdir(parents=True, exist_ok=True)

    note_file = note_directory / start_time.strftime('%H%M%S.md')

    # Write out the file so that the editor has something to open
    note_file.write_text(frontmatter.dumps(post))

    editor_command = [token.format(file=note_file)
                      for token in get_configuration().make_note.editor.split()]
    subprocess.run(editor_command)

    # Now need to reload the contents of the file, and convert all of the time values
    post = md_loader.load_file(note_file)
    template.end(post)

    # Write out the results one last time with the final contents
    note_file.write_text(frontmatter.dumps(post))

    # Notify the storage engine that everything is finished, and the file can be
    # sent to the remote
    topics.Storage.FILE_ADDED.publish(file=note_file)
    topics.Storage.FINISHED_MODIFICATIONS.publish(message="New Note from autology make_note")
    topics.Storage.PULL_CHANGES.publish()
    topics.Storage.PUSH_CHANGES.publish()
def _find_all_project_ids(): """Walk all of the log files, and then retrieve the project definitions.""" configuration_settings = get_configuration() for entry in log_file.walk_log_files(configuration_settings.processing.inputs): project.process_file(entry) print('Defined Projects: ') for project_id in sorted(project.get_defined_projects().keys()): definition = project.get_defined_projects()[project_id] project_name = definition.get('name', '-* UNDEFINED *-') print(' {}: {}'.format(project_id, project_name))
def _update_files(): """Find each of the files in the log file and hand them to the file updaters for processing.""" configuration_settings = get_configuration() # Need to find all of the files that are stored in the input_files directories in order to start building the # reports that will be used to generate the static log files. for input_path in configuration_settings.processing.inputs: search_path = pathlib.Path(input_path) # Currently going to make the assumption that everyone is using the path naming convention that I'm dictating # which is YYYY/MM/DD/file.ext for file_component in search_path.glob('*/*/*/*'): # Store all of the files into a dictionary containing the keys and a list of the files that are associated # with that day updaters.update_files(search_path, file_component)
def _copy_static_files(): """Responsible for copying over the static files after all of the contents have been generated.""" configuration = get_configuration() template_path = pathlib.Path(configuration.publishing.templates) output_path = pathlib.Path(configuration.publishing.output) static_files_list = _template_configuration.get('static_files', []) if static_files_list: for glob_definition in static_files_list: for file in template_path.glob(glob_definition): print('Copying static file: {}'.format(file)) # Make sure that the destination directory exists before copying the file into place destination_parent = file.parent.relative_to(template_path) destination = output_path / destination_parent destination.mkdir(parents=True, exist_ok=True) shutil.copy(str(file), str(destination))
def process_datetimes(dictionary):
    """
    Translate all of the datetime objects that are stored in the front matter and
    make them timezone aware.

    :param dictionary: the dictionary containing the values to process. This is
        called recursively on all dictionaries contained within.
    :return: the same dictionary, mutated in place.
    """
    # Resolve the site timezone lazily and at most once per call, instead of
    # re-reading the configuration for every naive datetime encountered.
    site_timezone = None

    for key, value in dictionary.items():
        if hasattr(value, 'tzinfo') and value.tzinfo is None:
            # All of the values read in are parsed as UTC time; yaml only does the
            # conversion for us when there is timezone information in the source.
            if site_timezone is None:
                site_timezone = pytz.timezone(get_configuration().site.timezone)
            dictionary[key] = pytz.utc.localize(value).astimezone(site_timezone)
        elif hasattr(value, 'keys'):
            process_datetimes(value)

    return dictionary
def _update_template(template_path):
    """Update the output generation templates based on the file/uri provided."""
    # template output directory is output/templates, so that location has to exist
    # before pulling out the templates
    template_location = template_utilities.get_template_directory()

    # Install the template and get the path to the template directory for updating
    # the configuration file.
    installed_path = template_utilities.install_template(template_location, template_path)

    # Nothing installed means nothing to update
    if not installed_path:
        return

    settings = get_configuration()

    # Override the configuration details with the new template path. This should
    # probably be handled by the publishing plugin, but for now this will work
    settings.publishing.templates = str(installed_path)

    dump_configuration(get_configuration_root() / 'config.yaml', settings)
def publish(*args, context=None, **kwargs):
    """
    Notify jinja to publish the template to the output_file location with all of the
    context provided.

    :param args: the arguments that will be used to find the template in the template
        configuration
    :param context: optional starting dictionary for the render context; updated in
        place with kwargs and the site details
    :param kwargs: additional values merged into the context
    :return: path of the generated file, relative to the output directory
    :raises KeyError: when args do not resolve to a template definition
    """
    # Build up the context argument; test `is None` (not truthiness) so that a
    # caller-supplied empty dict is still updated in place as documented.
    if context is None:
        context = {}

    recursive_update(context, kwargs)

    # Insert all of the site details into the context as well
    site_configuration = get_configuration().site.toDict()
    recursive_update(context.setdefault('site', {}), site_configuration)

    # Find the template definition object by walking the nested definition dict
    template_definition = _template_configuration.get('templates', {})
    for template_path in args:
        try:
            template_definition = template_definition[template_path]
        except KeyError:
            print('Cannot find template definition: {} '
                  'in template definitions: {}'.format(
                      args, _template_configuration.get('templates', {})))
            raise

    # Load the template and render to the destination file defined in the
    # template_definition
    root_template = _environment.get_template(str(template_definition['template']))
    output_file = template_definition['destination'].format(**context)
    output_content = root_template.render(context)

    output_file = _output_path / output_file

    # parents=True so destinations containing nested directories (e.g.
    # 'a/b/file.html') can be created; exist_ok alone fails for those.
    output_file.parent.mkdir(parents=True, exist_ok=True)
    output_file.write_text(output_content)

    return output_file.relative_to(_output_path)
def _main(args): """ Create a new note file in the correct location. """ loaded_templates = {} for ep in iter_entry_points(group='autology_templates'): template_object = ep.load()() if not isinstance(template_object, list): template_object = [template_object] for to in template_object: loaded_templates[to.name] = to template_name = args.template if args.template is not None else get_configuration( ).make_note.default_template template = loaded_templates[template_name] if args.template_list: print('Available templates:') for key in loaded_templates.keys(): print(' {} - {}'.format(key, loaded_templates[key].description)) else: start_date = args.start_date end_date = args.end_date if start_date: start_date = tzlocal.get_localzone().localize( datetime.datetime.strptime(start_date, DATE_FORMAT)) else: start_date = tzlocal.get_localzone().localize( datetime.datetime.now()) if end_date: end_date = tzlocal.get_localzone().localize( datetime.datetime.strptime(end_date, DATE_FORMAT)) _create_note(template, start_date, end_date)
def _pull_changes(): """Pull changes from the repository and do a merge""" configuration = get_configuration() _repo.remotes[configuration.git.remote].pull(configuration.git.refspec)
def _push_changes(): """Push changes to the repository.""" configuration = get_configuration() _repo.remotes[configuration.git.remote].push(configuration.git.refspec)
def url_filter(url):
    """Filter that will prepend the URL root for links in order to put the log in a
    directory on a web server."""
    config = get_configuration()
    url_root = config.publishing.url_root
    if url_root:
        # Reuse the already-fetched configuration instead of calling
        # get_configuration() a second time for the same value.
        return "{}{}".format(url_root, url)
    return url