def process_files (cls, source_relative_path_list, source_dir, target_dir, location):
    """Process each listed source file into the target directory.

    By default the target file keeps the location and name the source
    file had within the source directory. Alternative mappings can be
    configured per location: each mapping is a three-element array of a
    'selection' pattern (does this mapping apply to the file?), a
    'find' pattern (what text will be modified), and a 'replacement'
    pattern (what it will be replaced with). The first mapping whose
    selection pattern matches the source path is applied.
    """
    logging.message('DEBUG', 'Determining which files need to be rebuilt')
    locations = config.parameters.get('build', {}).get('locations', {})
    path_mappings = locations.get(location, {}).get('mappings', [])
    for relative_path in source_relative_path_list:
        mapped_path = relative_path
        for mapping in path_mappings:
            selection, find, replacement = mapping[0], mapping[1], mapping[2]
            if re.search(selection, relative_path):
                mapped_path = re.sub(find, replacement, relative_path)
                break
        file_processing.process(
            os.path.join(source_dir, relative_path),
            os.path.join(target_dir, mapped_path))
def check_for_file_changes(cls, file_list, file_type):
    """Return a dict mapping each path in file_list to a change flag.

    A flag is True when the file has been modified since the last
    build, or when it no longer exists. A missing file is not an error
    — references to it may also have been removed — so we just warn and
    flag it so any source files that referenced it get rebuilt.
    """
    change_flags = dict()
    for path in file_list:
        if not os.path.isfile(path):
            logging.message(
                'WARN', file_type.capitalize() + ' ' + path +
                ' is in the build log but no longer exists')
            change_flags[path] = True
            continue
        modified_at = datetime.fromtimestamp(os.path.getmtime(path))
        if modified_at > cls.last_build_time:
            logging.message(
                'TRACE', 'Detected change to ' + file_type + ' ' + path)
            change_flags[path] = True
        else:
            change_flags[path] = False
    return change_flags
def parse_arguments(cls):
    """Parse the command line and record the results on the class.

    Determines the Salal root directory (the directory holding the main
    script), parses the action, optional profile, config file path, and
    logging level, then sets the logging level.
    """
    salal_root = os.path.normpath(
        os.path.dirname(sys.modules['__main__'].__file__))
    cls.parameters['paths'] = {}
    cls.parameters['paths']['salal_root'] = salal_root
    parser = argparse.ArgumentParser()
    parser.add_argument('action', action='store')
    parser.add_argument('profile', action='store', nargs='?',
                        default='default')
    parser.add_argument(
        '--config-file', action='store',
        default=os.path.join(salal_root, 'config', 'system_config.json'))
    parser.add_argument('--logging-level', action='store', default='INFO')
    cls._arguments = parser.parse_args()
    # No logging may happen before this point, otherwise it would not
    # be affected by the requested logging level.
    logging.set_logging_level(cls._arguments.logging_level)
    cls.parameters['action'] = cls._arguments.action
    logging.message(
        'DEBUG', 'Using salal root directory of ' + salal_root)
    logging.message('DEBUG', 'Parsed command line arguments')
def apply_configuration(cls, config_type):
    """Merge the loaded configuration of the given type into the live
    settings, deep-updating parameters and globals separately."""
    logging.message('DEBUG', 'Applying ' + config_type + ' configuration')
    section = cls.config_data[config_type]
    if 'parameters' in section:
        utilities.deep_update(cls.parameters, section['parameters'])
    if 'globals' in section:
        utilities.deep_update(cls.globals, section['globals'])
def execute (cls, tag):
    """Run a full build: content, resources, then modules.

    Reports how many files were checked and built, then persists the
    build log. The <tag> parameter is part of the common action
    interface and is not used by this action.
    """
    file_processing.initialize()
    dependencies.initialize()
    cls.process_content()
    cls.process_resources()
    cls.process_modules()
    checked = dependencies.num_files_checked()
    built = dependencies.num_files_built()
    logging.message(
        'INFO', str(checked) + ' file(s) processed, ' + str(built) +
        ' file(s) built')
    dependencies.write_log()
def template_used(cls, template_file):
    """Record that the file currently being built uses template_file,
    and start tracking the template if it is new to the build."""
    templates = cls.file_updates[cls.cur_file_key]['templates']
    if template_file not in templates:
        templates.append(template_file)
    is_known = (template_file in cls.template_log
                or template_file in cls.template_updates)
    if not is_known:
        logging.message(
            'TRACE', 'Detected use of new template ' + template_file +
            ', now tracking it')
        cls.template_updates.add(template_file)
def resource_used(cls, resource_file):
    """Record that the file currently being built uses resource_file,
    and start tracking the resource if it is new to the build."""
    resources = cls.file_updates[cls.cur_file_key]['resources']
    if resource_file not in resources:
        resources.append(resource_file)
    is_known = (resource_file in cls.resource_log
                or resource_file in cls.resource_updates)
    if not is_known:
        logging.message(
            'TRACE', 'Detected use of new resource ' + resource_file +
            ', now tracking it')
        cls.resource_updates.add(resource_file)
def variable_used(cls, variable_name):
    """Record that the file currently being built uses variable_name,
    and capture the variable's current value if it is new to the
    build."""
    variables = cls.file_updates[cls.cur_file_key]['variables']
    if variable_name not in variables:
        variables.append(variable_name)
    is_known = (variable_name in cls.variable_log
                or variable_name in cls.variable_updates)
    if not is_known:
        logging.message(
            'TRACE', 'Detected use of new variable ' + variable_name +
            ', now tracking it')
        cls.variable_updates[variable_name] = config.globals[variable_name]
def remove_stale_references(cls, log, change_flags, reference_type):
    """Drop entries from <log> (a set of template or resource paths)
    that are no longer referenced by any file in the file log.

    <change_flags> supplies the candidate paths to examine;
    <reference_type> is the singular name ('template' or 'resource')
    used both for the file-log key and for log messages.
    """
    active_references = set()
    for file_entry in cls.file_log.values():
        active_references.update(file_entry.get(reference_type + 's', []))
    for reference_path in change_flags:
        if reference_path in active_references:
            continue
        logging.message(
            'TRACE', reference_type.capitalize() + ' ' + reference_path +
            ' is no longer part of the build, discontinuing tracking')
        log.remove(reference_path)
def initialize(cls): logging.message('DEBUG', 'Initializing dependency tracking') # read the build log cls.build_log_file = os.path.join( config.parameters['paths']['config_root'], config.parameters['paths']['build_log_dir'], config.parameters['profile'] + '.json') if os.path.isfile(cls.build_log_file): with open(cls.build_log_file, 'r') as build_log_fh: build_log = json.load(build_log_fh) cls.last_build_time = datetime.strptime(build_log['timestamp'], '%Y-%m-%d %H:%M:%S.%f') cls.file_log = build_log['files'] cls.variable_log = build_log['variables'] cls.template_log = set(build_log['templates']) cls.resource_log = set(build_log['resources']) else: cls.last_build_time = datetime.min cls.file_log = dict() cls.variable_log = dict() cls.template_log = set() cls.resource_log = set() # initialize utility variables cls.n_files_checked = 0 cls.n_files_built = 0 cls.cur_target = None cls.cur_source = None cls.cur_file_key = None # initialize the update queue cls.cur_build_time = datetime.now() cls.file_updates = dict() cls.variable_updates = dict() cls.template_updates = set() cls.resource_updates = set() cls.file_check_flags = { file_ref: False for file_ref in cls.file_log.keys() } cls.variable_change_flags = cls.check_for_variable_changes( cls.variable_log, cls.variable_updates) cls.template_change_flags = cls.check_for_file_changes( cls.template_log, 'template') cls.resource_change_flags = cls.check_for_file_changes( cls.resource_log, 'resource')
def process(cls, source_file_path, target_file_path):
    """Expand an XML content file through Jinja and write the result.

    Parses the source XML, auto-generates a page ID from the location
    of the index.xml file within the content root, configures the
    template search path (local templates before theme templates, so a
    local template wins), optionally configures modules named in the
    root's 'modules' attribute, renders the tree, and writes the
    expanded text to the target file.
    """
    logging.message('TRACE', 'Doing XML expansion')
    # Get the XML source file to be expanded
    xml_root = ET.parse(source_file_path).getroot()
    # We auto-generate a page ID, which is just the name of the
    # directory holding the index.xml file. os.path.relpath returns an
    # os.sep-separated path, so normalize to forward slashes first;
    # otherwise the depth count and the regexes below fail on Windows.
    file_stem = os.path.relpath(
        source_file_path,
        config.parameters['paths']['content_root']).replace(os.sep, '/')
    page_depth = file_stem.count('/')
    if page_depth == 0:
        page_id = 'home'
    elif page_depth == 1:
        page_id = re.sub(r'/index\.xml\Z', '', file_stem)
    else:
        page_id = re.sub(r'\A.+/([^/]+)/index\.xml\Z', r'\1', file_stem)
    xml_root.attrib['id'] = page_id
    # Configure the directories to be searched for templates. We
    # add the theme template dir after the local one, so a local
    # template will be found first if there is one
    template_dirs = [
        os.path.join(config.parameters['paths']['design_root'],
                     config.parameters['paths']['template_dir'])
    ]
    if 'theme_root' in config.parameters['paths']:
        template_dirs.append(
            os.path.join(config.parameters['paths']['theme_root'],
                         config.parameters['paths']['design_root'],
                         config.parameters['paths']['template_dir']))
    # Initialize Jinja
    env = jinja2.Environment(loader=jinja2.FileSystemLoader(template_dirs))
    # If there is a modules attribute on the root, configure modules
    if 'modules' in xml_root.attrib:
        logging.message('TRACE', 'Configuring modules for ' + file_stem)
        cls.configure_modules(xml_root, env)
    # Register Salal-specific Jinja functions
    custom_jinja_functions.register_functions(env)
    # Do template expansion on the source file
    xml_root.text = cls.render_node(
        xml_root, env,
        VariableTracker(config.globals,
                        success_callback=dependencies.variable_used,
                        failure_callback=dependencies.variable_not_found))
    # Write the expanded file to the target directory
    with open(target_file_path, mode='w', encoding='utf-8',
              newline='\n') as output_fh:
        output_fh.write(xml_root.text)
def process(cls, source_file_path, target_file_path):
    """Run a source file through Jinja and write the rendered output.

    The template is loaded from the source file's own directory. Note
    that in Jinja, template paths aren't file system paths and always
    use forward slashes regardless of the OS.
    """
    logging.message('TRACE', 'Doing simple expansion')
    source_dir, source_file = os.path.split(source_file_path)
    env = jinja2.Environment(loader=jinja2.FileSystemLoader(source_dir))
    template = env.get_template(source_file)
    tracker = VariableTracker(
        config.globals,
        success_callback=dependencies.variable_used,
        failure_callback=dependencies.variable_not_found)
    output = template.render({'globals': tracker})
    with open(target_file_path, mode='w', encoding='utf-8',
              newline='\n') as output_fh:
        output_fh.write(output)
def start_build_tracking(cls, target_file, source_file):
    """Note that a build of target_file from source_file is starting.

    Sets the current-file bookkeeping used by the *_used callbacks and,
    for a target/source pair not yet in the file log, creates a fresh
    empty entry in the update queue.

    NOTE(review): when the key already exists in the file log, no
    file_updates entry is created here — presumably the *_used
    callbacks only fire for targets flagged by needs_build; confirm.
    """
    cls.n_files_built += 1
    cls.cur_target = target_file
    cls.cur_source = source_file
    cls.cur_file_key = target_file + cls.separator + source_file
    if cls.cur_file_key in cls.file_log:
        return
    logging.message(
        'TRACE', 'Detected new build target ' + target_file +
        ', now tracking it')
    cls.file_updates[cls.cur_file_key] = {
        'target': target_file,
        'source': source_file,
        'variables': [],
        'templates': [],
        'resources': [],
    }
def process_modules (cls):
    """Copy module files to the build directory, processing each with
    the appropriate file processing handler.

    The destination directory is determined based on the file type and
    module name, so a module file 'foo/foo.css' ends up as
    'css/foo/foo.css' in the build directory. Theme files get processed
    first, so they can be overridden by local files — accomplished
    simply by overwriting the theme version of the file.
    """
    for module_dir in cls.configure_search_dirs('module'):
        logging.message('DEBUG', 'Processing modules from ' + module_dir)
        candidates = utilities.find_files(module_dir)
        selected = cls.select_files_to_process(candidates, 'modules')
        cls.process_files(
            selected, module_dir,
            config.parameters['paths']['profile_build_dir'], 'modules')
def process_resources (cls): # Copy all the files in the resources directory to the build # directory, processing them according to the appropriate # file processing handler. # # We write files to the same relative path in the build # directory as they had in the resources directory. So, for # profile <test>, /resources/js/app.js will become # /build/test/js/app.js. # # Theme files get processed first, so they can be overridden # by local files. This is accomplished simply by overwriting # the theme version of the file. resource_dirs = cls.configure_search_dirs('resource') for resource_dir in resource_dirs: logging.message('DEBUG', 'Processing resources from ' + resource_dir) resource_files = utilities.find_files(resource_dir) files_to_process = cls.select_files_to_process(resource_files, 'resources') cls.process_files(files_to_process, resource_dir, config.parameters['paths']['profile_build_dir'], 'resources')
def do_profile_configuration(cls):
    """Set profile-related parameters and, for a non-default profile,
    load and apply the profile's configuration file.

    A named profile with no configuration file is reported as an error.
    """
    # set profile-related parameters
    cls.parameters['profile'] = cls._arguments.profile
    cls.parameters['paths']['profile_build_dir'] = os.path.join(
        cls.parameters['paths']['build_root'], cls.parameters['profile'])
    # for a non-default profile, load and apply the config file
    if cls.parameters['profile'] != 'default':
        profile_config_file = os.path.join(
            cls.parameters['paths']['config_root'],
            cls.parameters['paths']['profiles_dir'],
            cls.parameters['profile'] + '.json')
        if not os.path.isfile(profile_config_file):
            logging.message(
                'ERROR', 'Specified profile ' + cls._arguments.profile +
                ' does not exist')
        else:
            cls.load_configuration('profile', profile_config_file)
            cls.apply_configuration('profile')
    # log the profile name
    logging.message('INFO', 'Using profile ' + cls.parameters['profile'])
def check_for_variable_changes(cls, variable_log, variable_updates):
    """Return a dict flagging which logged variables have changed.

    A variable that has disappeared from the globals is not treated as
    an error — references to it may also have been removed — so we just
    warn and flag it so dependent files get rebuilt. A variable whose
    value changed is flagged and its new value recorded in
    variable_updates.
    """
    change_flags = dict()
    for name in variable_log:
        if name not in config.globals:
            logging.message(
                'WARN', 'Variable ' + name +
                ' is in the build log but no longer exists')
            change_flags[name] = True
        elif variable_log[name] != config.globals[name]:
            logging.message('TRACE',
                            'Detected change to variable ' + name)
            change_flags[name] = True
            variable_updates[name] = config.globals[name]
        else:
            change_flags[name] = False
    return change_flags
def do_project_and_theme_configuration(cls):
    """Load the project configuration and, if it names a theme, load
    and apply the theme configuration before applying the project's."""
    # load the project configuration
    project_config_file = os.path.join(
        cls.parameters['paths']['config_root'],
        cls.parameters['paths']['project_config_file'])
    if os.path.isfile(project_config_file):
        cls.load_configuration('project', project_config_file)
    # check if there is a theme; if so, load and apply any theme
    # configuration
    theme_root = cls.config_data['project'].get(
        'parameters', {}).get('paths', {}).get('theme_root')
    if theme_root is not None:
        logging.message('INFO', 'Using theme ' + theme_root)
        theme_config_file = os.path.join(
            theme_root,
            cls.parameters['paths']['config_root'],
            cls.parameters['paths']['theme_config_file'])
        if os.path.isfile(theme_config_file):
            cls.load_configuration('theme', theme_config_file)
            cls.apply_configuration('theme')
    # apply the project configuration after the theme's
    cls.apply_configuration('project')
def set_extension_directories(cls):
    """Discover installed <extensions> directories and record them in
    the <extension_dirs> system path.

    Extensions can be located in three places: the base Salal
    directory, the theme directory (when a theme is configured), or the
    <design> directory for the project. In each case the extensions
    must live in an <extensions> directory at that location; only
    locations where that directory actually exists are registered.
    """
    theme_root = (cls.parameters['paths']['theme_root']
                  if 'theme_root' in config.parameters['paths'] else None)
    extension_locations = [
        cls.parameters['paths']['salal_root'],
        theme_root,
        'design',
    ]
    config.parameters['paths']['extension_dirs'] = []
    for location in extension_locations:
        if not location:
            continue
        extension_dir = os.path.join(
            location, cls.parameters['paths']['extensions_root'])
        if os.path.isdir(extension_dir):
            config.parameters['paths']['extension_dirs'].append(
                extension_dir)
            logging.message(
                'DEBUG',
                'Registered extensions directory ' + extension_dir)
def process (cls, source_file_path, target_file_path):
    """Build target_file_path from source_file_path using the best
    matching file handler (the matching handler with the lowest
    priority number).

    Skips the file when dependency tracking reports it is up to date.
    If no configured handler matches, an error is logged and the method
    returns — previously control fell through to a lookup of
    cls.handlers[None], which would raise KeyError if the ERROR log
    call does not itself terminate the program.
    """
    if not dependencies.needs_build(target_file_path, source_file_path):
        logging.message('TRACE',
                        target_file_path + ' is up to date, skipping')
        return
    # find the best matching handler
    handler_configs = config.parameters['file_handlers']
    best_handler = None
    best_priority = math.inf
    for key in handler_configs:
        if (cls.is_matching_handler(source_file_path, key)
                and handler_configs[key]['priority'] < best_priority):
            best_handler = key
            best_priority = handler_configs[key]['priority']
    if best_handler:
        logging.message(
            'TRACE', 'Processing ' + source_file_path + ' using ' +
            best_handler + ' handler')
    else:
        logging.message(
            'ERROR', 'Unable to find a matching handler for ' +
            source_file_path + ', exiting')
        # guard in case the ERROR log does not terminate execution:
        # without a handler there is nothing more we can do
        return
    # create the target directory if it doesn't exist
    os.makedirs(os.path.dirname(target_file_path), exist_ok=True)
    logging.message('INFO', target_file_path)
    dependencies.start_build_tracking(target_file_path, source_file_path)
    cls.handlers[best_handler].process(source_file_path, target_file_path)
    dependencies.stop_build_tracking()
def execute(cls, action):
    """Execute the list of commands configured for <action>.

    Iterates through the commands associated with the action:
    'internal' commands are dispatched to execute_internal_command,
    while 'external' commands have system variables substituted and are
    handed to the OS for execution. Unconfigured actions and unknown
    command types are reported as errors.
    """
    # Make sure this action is defined
    if action not in config.parameters['action_commands']:
        logging.message('ERROR',
                        'The action ' + action + ' is not configured')
        return
    logging.message('INFO', 'Executing ' + action + ' action')
    for command_spec in config.parameters['action_commands'][action]:
        command_type = command_spec['type']
        if command_type == 'internal':
            cls.execute_internal_command(command_spec['command'])
        elif command_type == 'external':
            command_string = utilities.substitute_variables(
                command_spec['command'], config.parameters)
            logging.message('INFO', command_string)
            os.system(command_string)
        else:
            logging.message(
                'ERROR',
                'Unrecognized command type ' + command_spec['type'])
def load_handlers(cls, directory):
    """Load all handlers found under <directory> in each registered
    extensions directory.

    Handlers are a way to determine what processing to carry out based
    on a 'tag' value. To implement a set of handlers, <directory> has a
    separate subdirectory per handler, each containing a <handler.py>
    file that creates an object called <handler> whose <get_tag> method
    returns the tag to associate with it. This method reads all the
    <handler.py> files and returns a dict mapping each tag to the
    corresponding handler object; what additional methods the handlers
    expose and how they are called is up to the caller.
    """
    handlers = dict()
    for extension_dir in config.parameters['paths']['extension_dirs']:
        handler_root = os.path.join(extension_dir, directory)
        if not os.path.isdir(handler_root):
            continue
        with os.scandir(handler_root) as entries:
            for entry in entries:
                if not entry.is_dir() or entry.name.startswith('__'):
                    continue
                handler_relative_path = os.path.join(
                    directory, entry.name, 'handler.py')
                handler_full_path = os.path.join(
                    extension_dir, handler_relative_path)
                if not os.path.exists(handler_full_path):
                    logging.message(
                        'WARN', 'Handler directory ' + entry.name +
                        ' does not contain a handler.py file')
                    continue
                # Strip only the trailing '.py' (the path is known to
                # end with 'handler.py'); a blanket replace('.py', '')
                # would also corrupt any '.py' occurring mid-path.
                package_specifier = os.path.normpath(
                    handler_relative_path).replace(os.sep, '.')[:-3]
                logging.message(
                    'TRACE',
                    'Loading handler from ' + package_specifier)
                # temporarily prepend the extension dir so the
                # handler package resolves
                sys.path.insert(0, extension_dir)
                handler_module = importlib.import_module(
                    package_specifier)
                sys.path.pop(0)
                tag = handler_module.handler.get_tag()
                logging.message('TRACE', tag)
                handlers[tag] = handler_module.handler
    return handlers
def configure_modules(cls, node, env):
    """Configure each module listed in the node's 'modules' attribute.

    The module directory is searched for locally first, then in the
    theme; when found it is appended to the Jinja template search path.
    Any <module>.css / <module>.js file the module provides is appended
    to the node's 'styles' / 'scripts' attribute. A module that cannot
    be located is reported as an error.
    """
    module_dirs = [
        os.path.join(config.parameters['paths']['design_root'],
                     config.parameters['paths']['module_dir'])
    ]
    if 'theme_root' in config.parameters['paths']:
        module_dirs.append(
            os.path.join(config.parameters['paths']['theme_root'],
                         config.parameters['paths']['design_root'],
                         config.parameters['paths']['module_dir']))
    for module in node.attrib['modules'].split():
        # try to locate the module directory
        module_location = None
        for module_dir in module_dirs:
            module_subdir = os.path.join(module_dir, module)
            if os.path.isdir(module_subdir):
                logging.message(
                    'TRACE',
                    'Found module ' + module + ' in ' + module_dir)
                module_location = module_dir
                env.loader.searchpath.append(module_subdir)
                break
        else:
            logging.message('ERROR', 'Cannot find module ' + module)
        # register the module's style sheet and script, if present
        for extension, attribute in [('css', 'styles'),
                                     ('js', 'scripts')]:
            file_path = os.path.join(module, module + '.' + extension)
            if not os.path.exists(
                    os.path.join(module_location, file_path)):
                continue
            logging.message(
                'TRACE', 'Configuring ' + attribute + ' for module ' +
                module)
            if attribute not in node.attrib:
                node.attrib[attribute] = ''
            else:
                node.attrib[attribute] += ' '
            node.attrib[attribute] += os.path.join(
                os.sep, extension, file_path)
def select_files_to_process (cls, file_path_list, location):
    """Filter file_path_list through the include/exclude patterns
    configured for <location>.

    When include patterns are configured, a file must match at least
    one of them; when exclude patterns are configured, a file matching
    any of them is dropped. With no configuration for the location,
    every file passes. Returns the list of file paths that passed.
    """
    logging.message(
        'DEBUG', 'Determining which ' + location +
        ' files pass include and exclude checks')
    # hoist the invariant per-location config lookup out of the loop
    location_config = {}
    if ('build' in config.parameters
            and 'locations' in config.parameters['build']
            and location in config.parameters['build']['locations']):
        location_config = config.parameters['build']['locations'][location]
    include_patterns = location_config.get('include')
    exclude_patterns = location_config.get('exclude', [])
    files_selected = []
    for file_path in file_path_list:
        if include_patterns is None:
            passed_check = True
        else:
            passed_check = any(
                re.search(pattern, file_path) is not None
                for pattern in include_patterns)
        if passed_check:
            passed_check = not any(
                re.search(pattern, file_path) is not None
                for pattern in exclude_patterns)
        if passed_check:
            logging.message('TRACE', 'Passed: ' + file_path)
            files_selected.append(file_path)
        else:
            logging.message('TRACE', 'Failed: ' + file_path)
    return files_selected
def variable_not_found(cls, variable_name):
    """Warn that a template referenced a variable that is not
    defined."""
    warning = ('Encountered reference to undefined variable '
               + variable_name)
    logging.message('WARN', warning)
def initialize (cls):
    """Load the file processing handlers and validate their
    configuration.

    Reports errors when no handlers are installed, none are configured,
    a configured handler is not installed, or a handler's configuration
    lacks an include pattern or a priority.
    """
    logging.message('DEBUG', 'Loading file processing handlers')
    cls.handlers = handlers.load_handlers(
        config.parameters['paths']['file_processing_handlers_dir'])
    if len(cls.handlers) == 0:
        logging.message(
            'ERROR',
            'No file processing handlers are installed, exiting')
    if ('file_handlers' not in config.parameters
            or len(config.parameters['file_handlers']) == 0):
        logging.message(
            'ERROR',
            'No file processing handlers are configured in the configuration files, exiting')
    for handler in config.parameters['file_handlers']:
        if handler not in cls.handlers:
            logging.message(
                'ERROR',
                'There is a configuration for a file processing handler '
                + handler + ', but no such handler is installed')
        handler_config = config.parameters['file_handlers'][handler]
        if 'include' not in handler_config:
            logging.message(
                'ERROR', 'Configuration for file processing handler ' +
                handler + ' must have an include pattern')
        if 'priority' not in handler_config:
            logging.message(
                'ERROR', 'Configuration for file processing handler ' +
                handler + ' must have a priority')
def do_system_configuration(cls):
    """Load and apply the system configuration file given on the
    command line.

    NOTE(review): execution continues past the ERROR log when the file
    is missing, so logging.message presumably terminates on ERROR —
    confirm against the logging module.
    """
    if not os.path.isfile(cls._arguments.config_file):
        logging.message('ERROR',
                        'Fatal error: System config file missing')
    cls.load_configuration('system', cls._arguments.config_file)
    cls.apply_configuration('system')
def load_configuration(cls, config_type, config_file):
    """Read the JSON configuration file and store its parsed contents
    under <config_type> in cls.config_data.

    The file is read explicitly as UTF-8: the platform default encoding
    may differ (e.g. on Windows), and the rest of the build writes
    files with encoding='utf-8'.
    """
    logging.message(
        'DEBUG', 'Loading ' + config_type + ' configuration from ' +
        config_file)
    with open(config_file, encoding='utf-8') as config_fh:
        cls.config_data[config_type] = json.load(config_fh)
def needs_build(cls, target_file, source_file):
    """Return True if target_file must be (re)built from source_file.

    A build is required when any of the following holds, checked in
    order: the target is not in the build log; the target file does not
    exist; the source was modified after the last build; or any
    variable, template, or resource the target references has changed.
    As a side effect, increments the files-checked counter and marks
    logged targets as checked (so stale entries can be pruned later).
    """
    cls.n_files_checked += 1
    target_key = target_file + cls.separator + source_file
    # Is this file in the build log? If not, rebuild, if yes, then
    # record that a build check was conducted for it and proceed.
    if target_key not in cls.file_log:
        logging.message(
            'TRACE', 'Target ' + target_file +
            ' is not in the build log, build required')
        return True
    else:
        cls.file_check_flags[target_key] = True
    # Does the target exist? If not, rebuild.
    if not os.path.isfile(target_file):
        logging.message(
            'TRACE', 'Target ' + target_file +
            ' does not exist, build required')
        return True
    # Is the source newer than the target? If so, rebuild.
    source_mod_time = datetime.fromtimestamp(os.path.getmtime(source_file))
    if source_mod_time > cls.last_build_time:
        logging.message(
            'TRACE', 'Source file ' + source_file +
            ' is newer than target ' + target_file + ', build required')
        return True
    # Have any referenced variables been changed? If so, rebuild.
    for variable in cls.file_log[target_key]['variables']:
        if cls.variable_change_flags[variable]:
            logging.message(
                'TRACE', 'Variable ' + variable + ' used by target ' +
                target_file + ' has changed, build required')
            return True
    # Have any referenced templates been changed? If so, rebuild.
    for template in cls.file_log[target_key]['templates']:
        if cls.template_change_flags[template]:
            logging.message(
                'TRACE', 'Template ' + template + ' used by target ' +
                target_file + ' has changed, build required')
            return True
    # Have any referenced resources been changed? If so, rebuild.
    for resource in cls.file_log[target_key]['resources']:
        if cls.resource_change_flags[resource]:
            logging.message(
                'TRACE', 'Resource ' + resource + ' used by target ' +
                target_file + ' has changed, build required')
            return True
    return False
def write_log(cls):
    """Write the updated build log to disk.

    Merges this build's updates into the file, variable, template, and
    resource logs; prunes log entries (and their on-disk build outputs)
    that are no longer part of the build; removes now-empty build
    directories; then serializes everything with a timestamp to the
    build log file.
    """
    logging.message('DEBUG', 'Updating build log')
    # incorporate updates to the file, variable, template, and resource logs
    cls.file_log.update(cls.file_updates)
    cls.variable_log.update(cls.variable_updates)
    cls.template_log.update(cls.template_updates)
    cls.resource_log.update(cls.resource_updates)
    # remove entries for stale files (were in build log but no
    # longer part of the build), deleting the built output too
    for file_ref in cls.file_check_flags:
        if not cls.file_check_flags[file_ref]:
            logging.message(
                'TRACE', 'Target ' + cls.file_log[file_ref]['target'] +
                ' is no longer part of the build, discontinuing tracking and deleting from build directory'
            )
            if os.path.exists(cls.file_log[file_ref]['target']):
                os.remove(cls.file_log[file_ref]['target'])
            cls.file_log.pop(file_ref)
    # remove build directories that are now empty
    empty_dirs = utilities.find_empty_subdirectories(
        config.parameters['paths']['profile_build_dir'])
    for dir in empty_dirs:
        logging.message(
            'TRACE', 'Build directory ' + dir +
            ' no longer contains anything, deleting')
        os.rmdir(dir)
    # remove variables from the log that aren't referenced by a
    # file anymore
    variables_referenced = set()
    for file_entry in cls.file_log.values():
        if 'variables' in file_entry:
            for variable in file_entry['variables']:
                variables_referenced.add(variable)
    for variable in cls.variable_change_flags:
        if variable not in variables_referenced:
            logging.message(
                'TRACE', 'Variable ' + variable +
                ' is no longer part of the build, discontinuing tracking')
            cls.variable_log.pop(variable)
    # remove templates and resources from the log that aren't
    # referenced by a file anymore
    cls.remove_stale_references(cls.template_log,
                                cls.template_change_flags, 'template')
    cls.remove_stale_references(cls.resource_log,
                                cls.resource_change_flags, 'resource')
    # create the build log directory if it doesn't exist
    os.makedirs(os.path.join(config.parameters['paths']['config_root'],
                             config.parameters['paths']['build_log_dir']),
                exist_ok=True)
    # write the file; default=str stringifies the datetime timestamp
    with open(cls.build_log_file, 'w') as build_log_fh:
        json.dump(
            {
                'timestamp': cls.cur_build_time,
                'files': cls.file_log,
                'variables': cls.variable_log,
                'templates': list(cls.template_log),
                'resources': list(cls.resource_log)
            }, build_log_fh, default=str)