def get_components_to_build_list(self):
    """
    Get 'components_to_build' list from book if necessary.

    When 'config.CUSTOM_COMPONENTS_TO_BUILD' is disabled and this builder
    comes from the LFS book, the list is read from the
    '<name>_components_to_build.txt' index file under the LFSBuilder tmp
    directory. Afterwards, honor 'config.CONTINUE_AT' if set.
    """
    if config.CUSTOM_COMPONENTS_TO_BUILD is False and \
       self.builder_data_dict["name"] in self.lfs_book_builders:
        # Get 'components_to_build' from book
        self.index_filename = "{n}_components_to_build.txt"
        self.index_filename = self.index_filename.format(
            n=self.builder_data_dict["name"])

        self.index_filename_path = os.path.join(
            self.builder_data_dict["lfsbuilder_tmp_directory"],
            self.index_filename)

        # Update dictionary entry with the list read from the index file
        tools.add_to_dictionary(self.builder_data_dict,
                                "components_to_build",
                                tools.list_from_file(
                                    self.index_filename_path),
                                concat=False)

    # Set 'components_to_build' to empty list if 'None'
    if self.builder_data_dict["components_to_build"] is None:
        tools.add_to_dictionary(self.builder_data_dict,
                                "components_to_build",
                                [],
                                concat=False)

    # .- continue-at: skip builders that explicitly disable this feature
    if config.CONTINUE_AT is not None and \
       self.builder_data_dict["name"] not in self.disable_continue_at_builders:
        # .- Try to start from the 'continue-at' component
        self.continue_at()
def run_post_steps(component_data_dict, parent_function):
    """
    Optionally back up the toolchain before running the default post steps.

    Compress 'config.BASE_DIRECTORY/tools' directory to
    'config.SAVE_TOOLCHAIN_FILENAME-ddmmyyyy.tar.gz' if
    'config.SAVE_TOOLCHAIN' is True, then call 'parent_function'.
    """
    if config.SAVE_TOOLCHAIN is True:
        date = datetime.date.today().strftime("%d%m%Y")
        filename = "{f}-{d}.tar.gz".format(f=config.SAVE_TOOLCHAIN_FILENAME,
                                           d=date)
        output_filename_path = os.path.abspath(
            os.path.join(component_data_dict["lfsbuilder_tmp_directory"],
                         filename))

        # Remove existent tarball so the backup command starts fresh
        if os.path.exists(output_filename_path):
            os.remove(output_filename_path)

        # Add 'backup_tools_cmd' to 'post'. The recipe command takes the
        # destination file via the 'df' placeholder.
        cmd = component_data_dict["backup_tools_cmd"]
        cmd = cmd.format(df=output_filename_path)

        # book's 'changingowner' commands change directory owner/group to
        # 'root' user. We will back 'tools' directory up before.
        cmd = """{cmd}
{post}""".format(cmd=cmd, post=component_data_dict["post"])

        tools.add_to_dictionary(component_data_dict, "post", cmd,
                                concat=False)

    # Run parent function
    parent_function()
def continue_at(self): """ Start from the 'config.CONTINUE_AT' component of the first provided builder. Fails in case this component do not exist. """ # .- is component present if tools.is_element_present( self.builder_data_dict["components_to_build"], config.CONTINUE_AT) is True: # get component index and trim 'components_to_build' list index = tools.get_element_index( self.builder_data_dict["components_to_build"], config.CONTINUE_AT) # trim list and update 'self.builder_data_dict' value aux_list = self.builder_data_dict["components_to_build"][index:] tools.add_to_dictionary(self.builder_data_dict, "components_to_build", aux_list, concat=False) # .- set 'config.CONTINUE_AT' to 'None' so we do not get into this method # any more on the current execution setattr(config, "CONTINUE_AT", None) else: text = """'continue-at' component '{c}' do not exists on the \ 'components_to_build' list for the '{b}' builder""".format( c=config.CONTINUE_AT, b=self.builder_data_dict["name"]) printer.error(text)
def set_attributes(component_data_dict, parent_function):
    """
    Run this component outside the chroot as 'root' for the
    'configuration' builder, and register the GRUB root partition
    substitution when a loop device was previously recorded.
    """
    # Call parent_function
    parent_function()

    if component_data_dict["builder_name"] == "configuration":
        # Build this component from outside the chroot
        tools.add_to_dictionary(component_data_dict,
                                key="build_into_chroot",
                                value=False,
                                concat=False)

        tools.add_to_dictionary(component_data_dict,
                                key="run_as_username",
                                value="root",
                                concat=False)

    # Set 'config.GRUB_ROOT_PARTITION_NAME' with stored value in the
    # 'tmp/loop_device.txt' file if present.
    loop_device = os.path.join(
        component_data_dict["lfsbuilder_tmp_directory"],
        "loop_device.txt")

    if os.path.exists(loop_device) is True:
        # 'component_substitution_list' defaults to 'None' in the component
        # data defaults; make sure it is a list before extending it,
        # otherwise this would raise an AttributeError.
        if component_data_dict["component_substitution_list"] is None:
            tools.add_to_dictionary(component_data_dict,
                                    "component_substitution_list",
                                    [],
                                    concat=False)

        # Substitution pair: placeholder -> device name without '/dev/'
        component_data_dict["component_substitution_list"].extend([
            config.GRUB_ROOT_PARTITION_NAME,
            tools.read_file(loop_device).replace("/dev/", "")
        ])
def set_attributes(component_recipe_data, parent_function):
    """Append the recipe's 'create_etc_sysconfig_console' command to 'post'."""
    # Run the default attribute setup first
    parent_function()

    # Concatenate the console configuration command onto the 'post' steps
    console_cmd = component_recipe_data["create_etc_sysconfig_console"]
    tools.add_to_dictionary(component_recipe_data, "post", console_cmd)
def set_attributes(component_data_dict, parent_function):
    """Force tests on for the 'system' builder to avoid 'post.sh' issues."""
    # Call parent_function
    parent_function()

    # Only the 'system' step needs tests included
    if component_data_dict["builder_name"] != "system":
        return

    tools.add_to_dictionary(component_data_dict,
                            "include_tests",
                            True,
                            concat=False)
def set_attributes(component_data_dict, parent_function):
    """
    Append the glibc-related source patches to the component's
    'previous' steps.
    """
    # Call parent function
    parent_function()

    # Required patch for glibc to compile properly. Add it to gcc2 previous steps
    # http://stackoverflow.com/questions/15787684/lfs-glibc-compilation-ld-error
    tools.add_to_dictionary(
        component_data_dict,
        "previous",
        "sed -i '/k prot/agcc_cv_libc_provides_ssp=yes' gcc/configure")

    # NOTE(review): the replacement text below ('\&1;') looks suspicious
    # compared with the usual LFS sched-deps.c sed — confirm against the book
    # before changing it.
    tools.add_to_dictionary(
        component_data_dict,
        "previous",
        "sed -i 's/if \((code.*))\)/if (\&1; \&\& \!DEBUG_INSN_P (insn))/' gcc/sched-deps.c"
    )
def __init__(self, component_data_dict):
    """Component without a tarball: build straight from 'sources_directory'."""
    BaseComponent.__init__(self, component_data_dict)

    # We do not need to unpack anything, so commands run from the
    # 'sources_directory' for both build and extracted paths.
    sources_dir = self.component_data_dict["sources_directory"]
    for entry in ("build_directory_path", "extracted_directory"):
        tools.add_to_dictionary(self.component_data_dict,
                                entry,
                                sources_dir,
                                concat=False)
def __init__(self, builder_data_dict):
    """Builder that works from the LFSBuilder 'tmp' directory."""
    BaseComponentsBuilder.__init__(self, builder_data_dict)

    # Build components from the 'lfs_src_directory/tmp' directory
    tmp_dir = builder_data_dict["lfsbuilder_tmp_directory"]
    for entry in ("setenv_directory", "sources_directory"):
        tools.add_to_dictionary(self.builder_data_dict,
                                entry,
                                tmp_dir,
                                concat=False)
def set_attributes(component_data_dict, parent_function):
    """
    Generate the disk-image creation commands as the component's 'post'
    step and run the component from the LFSBuilder tmp directory.
    """
    # Call parent_function
    parent_function()

    # Command to generate img.
    # NOTE(review): the echo mentions 'dd' but 'fallocate' is used — the
    # message may be stale; confirm before updating either.
    post_text = """
echo "Running \'dd\' command. This may take a while..."
fallocate -l @@LFS_IMG_SIZE@@ @@LFS_IMG_FILENAME@@

echo "Running \'mkfs\' command. This may take a while..."
mkfs -t @@LFS_FILESYSTEM_PARTITION_TYPE@@ -F @@LFS_IMG_FILENAME@@
"""
    # Resolve the '@@...@@' placeholders from config values
    post_text = post_text.replace("@@LFS_IMG_FILENAME@@",
                                  config.IMG_FILENAME)
    post_text = post_text.replace("@@LFS_IMG_SIZE@@", config.IMG_SIZE)
    post_text = post_text.replace("@@LFS_FILESYSTEM_PARTITION_TYPE@@",
                                  config.FILESYSTEM_PARTITION_TYPE)

    tools.add_to_dictionary(component_data_dict, "post", post_text,
                            concat=False)

    # Run component from the 'lfsbuilder_tmp_directory'
    tools.add_to_dictionary(component_data_dict,
                            key="build_directory_path",
                            value=component_data_dict["lfsbuilder_tmp_directory"],
                            concat=False)

    tools.add_to_dictionary(component_data_dict,
                            key="extracted_directory",
                            value=component_data_dict["lfsbuilder_tmp_directory"],
                            concat=False)

    # We will move back to that directory after building the component
    tools.add_to_dictionary(component_data_dict,
                            key="sources_directory",
                            value=component_data_dict["lfsbuilder_src_directory"],
                            concat=False)
def set_attributes(component_data_dict, parent_function):
    """
    Repair the 'sed-4.4' previous command shipped with LFS 8.0.

    http://lfs-dev.linuxfromscratch.narkive.com/Qy0pWTSI/6-24-sed-4-4-test-failure
    """
    previous_steps = component_data_dict["previous"]

    if previous_steps is not None:
        broken_cmd = "sed -i 's/panic-tests.sh//' Makefile.in"
        correct_cmd = "sed -i 's/testsuite.panic-tests.sh//' Makefile.in"

        # Update value in dictionary with the corrected command
        tools.add_to_dictionary(component_data_dict,
                                "previous",
                                previous_steps.replace(broken_cmd,
                                                       correct_cmd),
                                concat=False)

    # Call parent function
    parent_function()
def set_attributes(component_data_dict, parent_function):
    """
    Prepend setting 'LC_ALL' to 'en_US.UTF-8' to the 'configure' step
    so we ensure the rest of the commands run properly.
    """
    # Call parent_function
    parent_function()

    configure_cmd = """LC_ALL="en_US.UTF-8"
export LC_ALL

{c}""".format(c=component_data_dict["configure"])

    tools.add_to_dictionary(component_data_dict,
                            "configure",
                            configure_cmd,
                            concat=False)
def set_attributes(component_data_dict, parent_function):
    """Run this component from the LFSBuilder temporal directory."""
    # Call parent function
    parent_function()

    # Point both build paths at 'lfsbuilder_tmp_directory'
    tmp_directory = component_data_dict["lfsbuilder_tmp_directory"]
    for entry in ("build_directory_path", "extracted_directory"):
        tools.add_to_dictionary(component_data_dict,
                                key=entry,
                                value=tmp_directory,
                                concat=False)
def set_attributes(component_data_dict, parent_function):
    """
    Add the bind-mount command for the LFS 'sources' directory as the
    'post' step (or clear it when already mounted), and run this
    component from the LFSBuilder tmp directory.
    """
    # Call parent_function
    parent_function()

    # Add the 'mount' command. Resolve the source directory placeholder.
    sources_orig = config.SOURCES_ORIG_DIRECTORY.replace(
        "@@LFSBUILDER_SRC_DIRECTORY@@",
        component_data_dict["lfsbuilder_src_directory"]
    )

    if tools.is_mount(os.path.join(config.BASE_DIRECTORY, "sources")) is False:
        # If 'sources' is not already mounted
        post_value = """mkdir -pv $LFS/sources
mount -v --bind {orig} $LFS/sources"""
        post_value = post_value.format(orig=sources_orig)

        # Add mount command to 'post' steps
        tools.add_to_dictionary(component_data_dict,
                                key="post",
                                value=post_value,
                                concat=False)
    else:
        # Set 'post' to 'None' — nothing to mount
        tools.add_to_dictionary(component_data_dict,
                                key="post",
                                value=None,
                                concat=False)

    # Run component from the 'lfsbuilder_tmp_directory'
    tools.add_to_dictionary(component_data_dict,
                            key="build_directory_path",
                            value=component_data_dict["lfsbuilder_tmp_directory"],
                            concat=False)

    tools.add_to_dictionary(component_data_dict,
                            key="extracted_directory",
                            value=component_data_dict["lfsbuilder_tmp_directory"],
                            concat=False)

    # We will move back to that directory after building the component
    tools.add_to_dictionary(component_data_dict,
                            key="sources_directory",
                            value=component_data_dict["lfsbuilder_src_directory"],
                            concat=False)
def set_attributes(component_data_dict, parent_function):
    """
    Queue the virtual-kernel-filesystem mount commands in 'post' and run
    this component from the LFSBuilder temporal directory.
    """
    # Call parent_function
    parent_function()

    # Create destination directories just in case
    tools.add_to_dictionary(component_data_dict,
                            key="post",
                            value="mkdir -pv $LFS/{dev,proc,sys,run}")

    # Queue the mount command for every directory that is not mounted yet
    mount_commands = component_data_dict["mount_commands"]
    for directory in sorted(mount_commands):
        if not tools.is_mount(os.path.join(config.BASE_DIRECTORY, directory)):
            tools.add_to_dictionary(component_data_dict,
                                    key="post",
                                    value=mount_commands[directory])

    # Run component from the 'lfsbuilder_tmp_directory'
    tmp_directory = component_data_dict["lfsbuilder_tmp_directory"]
    for entry in ("build_directory_path", "extracted_directory"):
        tools.add_to_dictionary(component_data_dict,
                                key=entry,
                                value=tmp_directory,
                                concat=False)

    # We will move back to that directory after building the component
    tools.add_to_dictionary(component_data_dict,
                            key="sources_directory",
                            value=component_data_dict["lfsbuilder_src_directory"],
                            concat=False)
def generate_dict_from_xmlfile(self, filename):
    """
    Read XML command file and parse it into a Python dictionary.

    Every child of each 'component' node becomes a '<name>_<tag>' entry
    whose value is the node's text.
    """
    generated_dict = {}
    xml_parser = ET.XMLParser(load_dtd=True, dtd_validation=False)

    # Commands are read from 'temporal_folder'
    full_path = os.path.abspath(os.path.join(self.temporal_folder, filename))
    tree = ET.parse(full_path, parser=xml_parser)

    # Iterate over 'component' nodes to extract data
    for component_node in tree.iter('component'):
        name = component_node.attrib.get('name')

        for child in component_node:
            entry_key = "{c}_{stag}".format(c=name, stag=child.tag)
            tools.add_to_dictionary(generated_dict,
                                    entry_key,
                                    child.text,
                                    concat=False)

    return generated_dict
def set_attributes(component_data_dict, parent_function):
    """
    Run this component from the LFSBuilder temporal directory and return
    to the source directory afterwards.
    """
    # Call parent_function
    parent_function()

    tmp_directory = component_data_dict["lfsbuilder_tmp_directory"]
    src_directory = component_data_dict["lfsbuilder_src_directory"]

    # Run component from the 'lfsbuilder_tmp_directory'
    for entry in ("build_directory_path", "extracted_directory"):
        tools.add_to_dictionary(component_data_dict,
                                key=entry,
                                value=tmp_directory,
                                concat=False)

    # We will move back to that directory after building the component
    tools.add_to_dictionary(component_data_dict,
                            key="sources_directory",
                            value=src_directory,
                            concat=False)
def set_attributes(component_data_dict, parent_function):
    """
    Add the custom 'localedef' command for the 'system' builder and, on
    LFS >= 8.2, fold the 'previous' steps into 'configure' so the
    'GCC_INCDIR' variable they set is visible to the configure step.
    """
    # Call parent function
    parent_function()

    if component_data_dict["builder_name"] == "system":
        # Custom 'localedef' command for users to add their own
        cmd = "localedef -i {locale} -f {charmap} {lang}".format(
            locale=config.LOCALE,
            charmap=config.CHARMAP,
            lang=config.LANG)
        tools.add_to_dictionary(component_data_dict, "post", cmd)

    # Run 'previous' steps into 'compile.sh' file for 'system' builder.
    # Starting on version 8.2 it sets the 'GCC_INCDIR' variable
    # which is required for 'configure' step.
    # Compare versions as integer tuples: 'float()' would mis-order
    # releases such as '8.10' (float 8.1 < 8.2).
    lfs_version = tuple(
        int(part) for part in str(config.LFS_VERSION).split("."))

    if component_data_dict["builder_name"] == "system" and \
       lfs_version >= (8, 2):
        configure_cmd = """{p}

{c}""".format(p=component_data_dict["previous"],
              c=component_data_dict["configure"])

        tools.add_to_dictionary(component_data_dict,
                                "configure",
                                configure_cmd,
                                concat=False)

        # Set 'previous' to None so it is not executed twice
        tools.add_to_dictionary(component_data_dict,
                                "previous",
                                None,
                                concat=False)
def set_attributes(component_data_dict, parent_function):
    """
    Queue 'umount' commands for every mounted directory (and for
    'config.BASE_DIRECTORY' itself) and run the component from the
    LFSBuilder temporal directory.
    """
    # Call parent_function
    parent_function()

    # Start from an empty 'post' step list
    tools.add_to_dictionary(component_data_dict, key="post", value=None)

    # Generate 'umount' commands for 'component_data_dict["umount_directories"]'
    for directory in component_data_dict["umount_directories"]:
        if tools.is_mount(os.path.join(config.BASE_DIRECTORY, directory)):
            umount_cmd = component_data_dict["umount_cmd"].format(d=directory)
            tools.add_to_dictionary(component_data_dict,
                                    key="post",
                                    value=umount_cmd)

    # Generate 'umount' command for 'config.BASE_DIRECTORY'
    if tools.is_mount(config.BASE_DIRECTORY):
        tools.add_to_dictionary(
            component_data_dict,
            key="post",
            value=component_data_dict["umount_base_directory_cmd"])

    # Run component from the 'lfsbuilder_tmp_directory'
    tmp_directory = component_data_dict["lfsbuilder_tmp_directory"]
    for entry in ("build_directory_path", "extracted_directory"):
        tools.add_to_dictionary(component_data_dict,
                                key=entry,
                                value=tmp_directory,
                                concat=False)

    # We will move back to that directory after building the component
    tools.add_to_dictionary(component_data_dict,
                            key="sources_directory",
                            value=component_data_dict["lfsbuilder_src_directory"],
                            concat=False)
def generate_components_dict(self, components_filelist):
    """
    Generate 'components_dict' from 'components_filelist'.
    Parse component XML file.

    For every XML file: back it up, run 'modify_xmlfile' (or the
    component's own override from 'functions.py'), then classify each
    'screen/userinput' command by its 'remap' attribute into
    '<component>-previous/configure/make/install/test/check/post' keys.
    """
    components_dict = {}

    for componentfile_path in components_filelist:
        component_filename = os.path.basename(componentfile_path)
        component_name = self.get_component_name(component_filename)
        component_recipe_data = tools.read_recipe_file(component_name)

        # Backup xmlfile
        tools.backup_file(componentfile_path)

        # Read 'functions.py' file if exists
        self.extra_functions = tools.read_functions_file(component_name)

        # .- modify_xml: prefer the component's override when provided
        if self.extra_functions is not None and \
           hasattr(self.extra_functions, "modify_xmlfile"):
            self.extra_functions.modify_xmlfile(component_recipe_data,
                                                componentfile_path,
                                                self.modify_xmlfile)
        else:
            self.modify_xmlfile(component_recipe_data, componentfile_path)

        # Create XML parser on every iteration
        parser = ET.XMLParser(load_dtd=True, dtd_validation=False)
        xml_tree = ET.parse(componentfile_path, parser=parser)

        # Save components list to file
        tools.add_text_to_file(self.save_index_file, component_name)

        # Do not create build directory by default
        key = "{c}-require_build_dir".format(c=component_name)
        tools.add_to_dictionary(components_dict, key, "0")

        # Check 'screen/userinput' nodes
        for node in xml_tree.iter('screen'):
            if node.attrib.get('revision') == config.EXCLUDED_BOOT_MANAGER:
                # skip unselected boot manager
                continue

            for subnode in node.iter('userinput'):
                # Does the 'remap' attribute exists?
                # If not, add it to '_previous'
                if 'remap' in subnode.attrib:
                    attribute = subnode.attrib.get('remap')
                else:
                    attribute = ""

                if attribute == "pre":
                    # Check if we have to create a build directory
                    if subnode.text.find("mkdir -v build") != -1:
                        key = component_name + "-require_build_dir"
                        tools.add_to_dictionary(components_dict,
                                                key,
                                                "1",
                                                concat=False)
                        continue
                    # Skip patch calls as we do this step later on
                    elif subnode.text.find("patch -Np1") != -1:
                        continue
                    else:
                        key = component_name + "-previous"
                elif attribute == "configure":
                    key = component_name + "-configure"
                elif attribute == "make":
                    key = component_name + "-make"
                elif attribute == "install":
                    key = component_name + "-install"
                elif attribute == "test":
                    key = component_name + "-test"
                elif attribute == "check":
                    key = component_name + "-check"
                elif attribute == "locale-full":
                    # Do not run the "locale-full" command because
                    # it is not necessary
                    continue
                elif attribute == "lfsbuilder_disabled":
                    # Do not run the "lfsbuilder_disabled" commands because
                    # it is not necessary
                    continue
                else:
                    # By default, add it to the post steps.
                    # Stripping does not have 'remap' attribute
                    key = component_name + "-post"

                # Add the value to dictionary (concatenates by default)
                tools.add_to_dictionary(components_dict, key, subnode.text)

        # 'parser' is no longer required
        del parser

        # Restore backup
        if config.RESTORE_XML_BACKUPS is True:
            tools.restore_backup_file(componentfile_path)

    # Return generated dictionary
    return components_dict
def modify_xmlfile(self, component_recipe_data, componentfile_path):
    """
    Modify 'componentfile_path' XML file with common substitution and/or
    those in the 'component_substitution_list' entry of the component
    YAML recipe.

    This method can be overwritten in the 'functions.py' file under the
    component recipe directory. If so, arguments are:

    - 'component_recipe_data': current component data, which can be
      accessed and modified.
    - 'componentfile_path': XML file path in which you should do
      modifications.
    - 'parent_function': parent method reference.
    """
    # Remove 'literal' subchild so commands waiting the EOF string get
    # properly parsed. The list is flat pairs: pattern, replacement.
    substitution_list = ["<literal>", "",
                         "</literal>", ""]

    # Remove commands that try to run a bash console interactively
    bash_removes = [
        "exec /bin/bash --login +h",
        "exec /tools/bin/bash",
        "logout",
        "exec /tools/bin/bash --login +h",
        r"""chroot $LFS /tools/bin/env -i \
HOME=/root TERM=$TERM PS1='\u:\w\$ ' \
PATH=/bin:/usr/bin:/sbin:/usr/sbin \
/tools/bin/bash --login""",
        "chroot $LFS /tools/bin/env -i"
    ]

    # Disable bash commands and add them to the 'substitution_list'
    bash_removes_disabled = tools.disable_commands(bash_removes)
    substitution_list.extend(bash_removes_disabled)

    # Get component data and include its 'substitution_list',
    # 'disable_commands' and 'comment_out_list' elements into the
    # 'substitution_list'.
    # Configure service installation for 'blfs' components that implement it.
    if "component_substitution_list" in component_recipe_data and \
       component_recipe_data["component_substitution_list"] is not None:
        tools.process_component_substitution_list(
            component_recipe_data["component_substitution_list"])
        substitution_list.extend(
            component_recipe_data["component_substitution_list"])

    if "disable_commands_list" in component_recipe_data and \
       component_recipe_data["disable_commands_list"] is not None:
        substitution_list.extend(
            tools.disable_commands(
                component_recipe_data["disable_commands_list"]))

    if "comment_out_list" in component_recipe_data and \
       component_recipe_data["comment_out_list"] is not None:
        substitution_list.extend(
            tools.comment_out(component_recipe_data["comment_out_list"]))

    # Generate bootscript installation command.
    # Use 'bootscript_install_cmd' for 'sysvinit' and by default.
    # Use 'bootscript_install_cmd_systemd' for 'systemd'.
    # Always substitute over 'bootscript_install_cmd' because 'systemd'
    # command is not parsed.
    if "bootscript_install_cmd" in component_recipe_data and \
       component_recipe_data["bootscript_install_cmd"] is not None:
        # Add original command
        substitution_list.append(
            component_recipe_data["bootscript_install_cmd"])

        # Subsitute with 'bootscript_install_cmd_systemd' if necessary
        if "bootscript_install_cmd_systemd" in component_recipe_data and \
           component_recipe_data["bootscript_install_cmd_systemd"] is not None and \
           config.SYSTEMD is True:
            # Install the 'systemd' command instead
            tools.add_to_dictionary(
                component_recipe_data,
                "bootscript_install_cmd",
                component_recipe_data["bootscript_install_cmd_systemd"],
                concat=False)

        # Generate bootscript installation command
        substitution_list.append(
            tools.modify_blfs_component_bootscript_install(
                component_recipe_data["bootscript_install_cmd"]))

    # Substitute
    tools.substitute_multiple_in_file(componentfile_path, substitution_list)
def generate_entities_data_dict(self, entities_files=None):
    """
    Parse entities files and generate a dictionary.

    ENTITY declarations may span several lines; incomplete lines (not
    ending in '>') are buffered in 'saved_line' and joined with the next
    line before being parsed.
    """
    data_dict = {}
    # Do not concat lines by default
    add_line = False
    saved_line = None
    html_comments_regexp = r"<!--(.|\s|\n)*?-->"

    # Sanitize input: fall back to the instance's entities file list
    if tools.is_empty_list(entities_files):
        entities_files = self.entities_filelist

    for entity_file in entities_files:
        file_text = tools.read_file(entity_file)

        # Remove HTML comments from file_text.
        # It makes parsing entities much easier
        file_text = regexp.sub(html_comments_regexp, "", file_text,
                               flags=regexp.DOTALL)

        # Restart flag for every file
        add_line = False
        for line in file_text.split("\n"):
            # Concatenate lines?
            if add_line is True:
                line = saved_line + line

            # Remove leading and trailing whitespaces in line if any
            line = line.rstrip().lstrip()

            # Check if 'line' is an ENTITY description line
            if line.find("ENTITY") != -1:
                line_fields = line.split("\"")

                # Process ENTITY line if we have a complete line.
                # That is, it ends with '>' character
                if line_fields[-1] == ">":
                    # line_fields = ['<!ENTITY attr-size ', '336 KB', '>']
                    key = line_fields[0].split(" ")[1]

                    # Process entities in 'value' if any
                    value = self.process_entities(line_fields[1])

                    # Add to dictionary
                    tools.add_to_dictionary(data_dict, key, value,
                                            concat=False)

                    # Restart flag. We begin with a new different line
                    add_line = False
                    saved_line = ""
                else:
                    # Add next line to the new one and try to process it
                    saved_line = line
                    add_line = True

    # Return generated dictionary
    return data_dict
def get_download_urls(self):
    """
    Generate list with 'extra_download_urls' values for components on lfs book.

    For every builder: generate and parse the commands XML file, read the
    book entities, then collect each component's 'extra_download_urls'
    and '<component>_url' values with entity placeholders resolved.
    """
    urls_list = []
    aux_urls_list = []

    # We will substitute entities values twice to ensure composed
    # placeholders get substituted
    substitution_rounds = 2

    def _substitute_entities(url, entities_data):
        # Replace every known entity placeholder; repeat so placeholders
        # produced by an earlier round get resolved too.
        # BUGFIX: the second caller previously started its counter at 1,
        # running one round fewer than 'substitution_rounds' intended.
        for _ in range(substitution_rounds):
            for entity_key in entities_data.keys():
                url = url.replace(tools.generate_placeholder(entity_key),
                                  entities_data[entity_key])
        return url

    builders_list = self.lfs_builders
    if self.downloader_data["name"] == "blfs":
        builders_list = self.blfs_builders

    # .- generate builders in 'lfs' book, get 'components_to_build'
    # list and add 'extra_download_urls' from components recipes.
    for builder in builders_list:
        os.chdir(self.downloader_data["lfsbuilder_src_directory"])

        # .- generate builder object from BuilderGenerator
        bg = builders.BuilderGenerator(builder)
        b = bg.get_builder_reference()
        del bg

        # .- read entities in case we need to substitue in any URL
        xmlp = xmlparser.LFSXmlParser(b.builder_data_dict)

        # .- write commands xml file and read it
        xmlp.generate_commands_xmlfile()
        destination_filename = getattr(
            config,
            "{b}_XML_FILENAME".format(
                b=b.builder_data_dict["name"].upper()))
        components_data_dict = xmlp.generate_dict_from_xmlfile(
            destination_filename)

        # .- read entities so we can do substitutions in case it is necessary
        entities_data = xmlp.generate_entities_data_dict()

        # .- always download 'blfs-bootscripts' component for 'blfs',
        # which includes both 'blfs-bootscripts' for 'sysvinit' and
        # 'blfs-systemd-units' for 'systemd' tarballs
        # to be able to install services at build time.
        if self.downloader_data["name"] == "blfs":
            tools.remove_all_and_add_element(
                b.builder_data_dict["components_to_build"],
                "blfs-bootscripts")

        # .- add 'extra_download_urls' from builder 'components_to_build'
        # recipes.
        if b.builder_data_dict["components_to_build"] is not None:
            for component in b.builder_data_dict["components_to_build"]:
                # .- read component recipe
                component_recipe_data = tools.read_recipe_file(component)

                # .- add 'extra_download_urls' if present
                if "extra_download_urls" in component_recipe_data and \
                   component_recipe_data["extra_download_urls"] is not None:
                    for url in component_recipe_data["extra_download_urls"]:
                        # .- process_entities in url
                        url = xmlp.process_entities(url)
                        if url not in aux_urls_list:
                            aux_urls_list.append(
                                _substitute_entities(url, entities_data))

                # .- update 'component_url' in case it is present in
                # 'component_recipe_data'
                key = "{c}_url".format(c=component)
                if key in component_recipe_data and \
                   component_recipe_data[key] is not None:
                    # .- update value read from 'components_data'
                    tools.add_to_dictionary(components_data_dict,
                                            key,
                                            component_recipe_data[key],
                                            concat=False)

                # .- add 'component_url' to the 'aux_urls_list'.
                # Guard against the key being absent from the parsed XML
                # data to avoid a KeyError for components without a URL.
                if key in components_data_dict and \
                   components_data_dict[key] is not None and \
                   components_data_dict[key] not in aux_urls_list:
                    url = xmlp.process_entities(components_data_dict[key])
                    aux_urls_list.append(
                        _substitute_entities(url, entities_data))

        # .- delete 'builder' reference
        del b

    # .- add modified url. Check it is not present in 'urls_list' in case
    # there are duplicated componentes. For example, 'binutils' and
    # 'binutils2'
    for url in aux_urls_list:
        if (url is not None) and (url not in urls_list):
            urls_list.append(url)

    # .- return generated 'urls_list' and
    # ensure there is no any empty string on it
    tools.remove_element(urls_list, "")
    return urls_list
def __init__(self, builder_name):
    """
    Build the default 'builder_data_dict' for 'builder_name', merge the
    builder recipe on top, and resolve the builder class to instantiate.
    """
    # Default values
    self.builder_data_dict = {
        "name": builder_name,
        "env_PATH_value": "${UNSET_VARIABLE}",
        "chapters_list": [],
        "excludes": [],
        "components_to_build": [],
        "setenv_directory": config.BASE_DIRECTORY,
        "setenv_filename": "setenv.sh",
        "setenv_template": "setenv.tpl",
        "book": "lfs",
        "runscript_cmd": "env -i /bin/bash",
        "base_module": "builders",
        "base_builder": "ComponentsBuilder",
        "sources_directory": os.path.join(config.BASE_DIRECTORY, "sources"),
        "tools_directory": os.path.join(config.BASE_DIRECTORY, "tools"),
        "lfsbuilder_src_directory": os.path.dirname(
            os.path.realpath(__file__)),
        "lfsbuilder_tmp_directory": os.path.join(
            os.path.dirname(os.path.realpath(__file__)), "tmp"),
        "lfsbuilder_templates_directory": os.path.join(
            os.path.dirname(os.path.realpath(__file__)), "templates")
    }

    # Read the builder recipe and return a reference to the object type
    self.builder_recipe_data = tools.read_recipe_file(
        self.builder_data_dict["name"],
        directory="builders")

    # Add 'xml_commands_filename' to 'self.builder_data_dict'
    # from 'config.py' file. default=None
    getattr_data_value = self.builder_data_dict["name"].upper()
    tools.add_to_dictionary(
        self.builder_data_dict,
        "xml_commands_filename",
        getattr(config,
                "{b}_XML_FILENAME".format(b=getattr_data_value),
                None),
        concat=False)

    # Join dicts. 'self.builder_recipe_data' values will have preference
    # over those currently in 'self.builder_data_dict' (defaults)
    self.builder_data_dict = tools.join_dicts(self.builder_data_dict,
                                              self.builder_recipe_data)

    # Include '-x' parameter to the 'runscript_cmd'
    # if 'config.DEBUG_SCRIPTS' is 'True'
    if config.DEBUG_SCRIPTS is True:
        value = "{c} -x".format(c=self.builder_data_dict["runscript_cmd"])
        tools.add_to_dictionary(self.builder_data_dict,
                                "runscript_cmd",
                                value,
                                concat=False)

    # Instantiate a ComponentsBuilder by default
    self.class_fullname = "{m}.{t}".format(
        m=self.builder_data_dict["base_module"],
        t=self.builder_data_dict["base_builder"])

    # Instantiate a 'InfrastructureComponentsBuilder' if required.
    # NOTE: only the two known 'base_builder' values are accepted; any
    # other value is a fatal error via 'printer.error'.
    if self.builder_data_dict["base_builder"].lower() == \
            "componentsbuilder":
        self.class_fullname = "{m}.{t}".format(
            m=self.builder_data_dict["base_module"],
            t="ComponentsBuilder")
    elif self.builder_data_dict["base_builder"].lower() == \
            "infrastructurecomponentsbuilder":
        self.class_fullname = "{m}.{t}".format(
            m=self.builder_data_dict["base_module"],
            t="InfrastructureComponentsBuilder")
    else:
        text = "Unknown 'base_builder': '{b}'"
        text = text.format(b=self.builder_data_dict["base_builder"])
        printer.error(text)

    # Create object
    self.obj = tools.get_class(self.class_fullname)
def __init__(self, component_name, builder_data_dict,
             xml_components_data_dict):
    """
    Build the component data dictionary from defaults, XML-parsed book
    commands and the component YAML recipe, normalize boolean flags and
    resolve the component class to instantiate.
    """
    self.component_data_defaults = {
        "name": component_name,
        "show_name": component_name,
        "key_name": component_name,
        "package_name": component_name,
        "version": None,
        "extracted_directory": None,
        "build_directory_path": None,
        "buildscript_path": None,
        "require_build_dir": False,
        "run_as_username": config.NON_PRIVILEGED_USERNAME,
        "runscript_cmd": builder_data_dict["runscript_cmd"],
        "sources_directory": "{b}/sources".format(b=config.BASE_DIRECTORY),
        "builder_name": builder_data_dict["name"],
        "component_substitution_list": None,
        "disable_commands_list": None,
        "configure": None,
        "make": None,
        "install": None,
        "test": None,
        "include_tests": None,
        "configure_options": None,
        "make_options": None,
        "install_options": None,
        "test_options": None,
        "base_module": "components",
        "base_component": "CompilableComponent",
        "setenv_directory": builder_data_dict["setenv_directory"],
        "setenv_filename": builder_data_dict["setenv_filename"],
        "build_into_chroot": builder_data_dict["build_into_chroot"],
        "script_template": "script.tpl",
        "lfsbuilder_src_directory":
            builder_data_dict["lfsbuilder_src_directory"],
        "lfsbuilder_tmp_directory":
            builder_data_dict["lfsbuilder_tmp_directory"],
        "lfsbuilder_templates_directory":
            builder_data_dict["lfsbuilder_templates_directory"]
    }

    # Update keys, values that match the 'key_name'
    # from 'xml_components_data_dict'
    for key, value in xml_components_data_dict.items():
        if key.startswith(self.component_data_defaults["key_name"]):
            # Rename 'component named' keys removing the 'component_name'
            # prefix and add to dictionary
            new_key = key.replace(
                "{c}_".format(c=self.component_data_defaults["key_name"]),
                "")
            tools.add_to_dictionary(self.component_data_defaults,
                                    new_key,
                                    value,
                                    concat=False)

    # Read component recipe. Recipe values win over defaults/XML values.
    self.component_recipe_data = tools.read_recipe_file(component_name)
    self.component_data_dict = tools.join_dicts(self.component_data_defaults,
                                                self.component_recipe_data)

    # Cast 'require_build_dir' from string to bool
    # bool(int 1) = True
    # bool(int 0) = False
    if "require_build_dir" in self.component_data_dict and \
       self.component_data_dict["require_build_dir"] is not None:
        # Update value
        tools.add_to_dictionary(
            self.component_data_dict,
            "require_build_dir",
            bool(int(self.component_data_dict["require_build_dir"])),
            concat=False
        )
    elif "require_build_dir" in self.component_data_dict and \
            self.component_data_dict["require_build_dir"] is None:
        # Set 'require_build_dir' to 'False'
        tools.add_to_dictionary(
            self.component_data_dict,
            "require_build_dir",
            False,
            concat=False
        )
    else:
        # Set 'require_build_dir' to 'False'
        # NOTE: key is always present via defaults, so this branch is a
        # defensive fallback.
        tools.add_to_dictionary(
            self.component_data_dict,
            "require_build_dir",
            False,
            concat=False
        )

    # Ensure 'run_into_chroot' is a boolean value
    if "build_into_chroot" in self.component_data_dict and \
       self.component_data_dict["build_into_chroot"] is not None:
        # Update value
        tools.add_to_dictionary(
            self.component_data_dict,
            "build_into_chroot",
            bool(int(self.component_data_dict["build_into_chroot"])),
            concat=False
        )
    elif "build_into_chroot" in self.component_data_dict and \
            self.component_data_dict["build_into_chroot"] is None:
        # Set 'build_into_chroot' to 'False'
        tools.add_to_dictionary(
            self.component_data_dict,
            "build_into_chroot",
            False,
            concat=False
        )
    else:
        # Set 'build_into_chroot' to 'False' (defensive fallback)
        tools.add_to_dictionary(
            self.component_data_dict,
            "build_into_chroot",
            False,
            concat=False
        )

    # Process 'component_substitution_list'
    if self.component_data_dict["component_substitution_list"] is not None:
        tools.process_component_substitution_list(
            self.component_data_dict["component_substitution_list"]
        )

    # Instanciate component object. Select component type for instance
    # .- CompilableComponent
    if self.component_data_dict["base_component"].lower() == \
            "compilablecomponent":
        self.class_fullname = "{m}.{t}".format(
            m=self.component_data_dict["base_module"],
            t="CompilableComponent")
    # .- SystemConfigurationComponent
    elif self.component_data_dict["base_component"].lower() == \
            "systemconfigurationcomponent":
        self.class_fullname = "{m}.{t}".format(
            m=self.component_data_dict["base_module"],
            t="SystemConfigurationComponent")
    else:
        text = "Unknown 'base_component': '{b}'"
        text = text.format(b=self.component_data_dict["base_component"])
        printer.error(text)

    # Create object
    self.obj = tools.get_class(self.class_fullname)
def run_post_steps(component_data_dict, parent_function):
    """
    Install the SSH public key shipped with the 'openssh' recipe into
    the '$HOME/.ssh' directory of 'openssh_username', after running the
    parent post steps.

    Configuration is skipped (with a warning) when the public key file,
    the target username or the user's home directory do not exist.
    """
    # Parent post steps run first
    parent_function()

    # Provided public key file under the 'openssh' recipe directory
    public_key_filename = component_data_dict["openssh_public_key_filename"]
    ssh_filename = os.path.join(
        component_data_dict["lfsbuilder_src_directory"],
        "recipes",
        "components",
        "openssh",
        "files",
        public_key_filename)

    # .- resolve the $HOME directory path for the target username
    if component_data_dict["openssh_username"] == "root":
        # It can be dangerous!
        printer.warning("WARNING: will configure SSH access for 'root'")
        home_directory = os.path.join(config.BASE_DIRECTORY, "root")
    elif component_data_dict[
            "openssh_username"] == "config.NON_PRIVILEGED_USERNAME":
        # Replace the recipe placeholder by the configured value
        tools.add_to_dictionary(component_data_dict,
                                "openssh_username",
                                config.NON_PRIVILEGED_USERNAME,
                                concat=False)
        home_directory = os.path.join(config.BASE_DIRECTORY,
                                      "home",
                                      config.NON_PRIVILEGED_USERNAME)
    else:
        home_directory = os.path.join(config.BASE_DIRECTORY,
                                      "home",
                                      component_data_dict["openssh_username"])

    # Destination paths under '$HOME/.ssh'
    ssh_config_path = os.path.join(home_directory, ".ssh")
    ssh_destination_filename = os.path.join(ssh_config_path,
                                            public_key_filename)
    authorized_keys = os.path.join(ssh_config_path, "authorized_keys")

    if os.path.exists(ssh_filename) is False:
        # Do not configure. SSH public key do not exists.
        msg = """WARNING: SSH access will not be configured because \
the provided public key file '{k}' do not exists."""
        printer.warning(msg.format(k=public_key_filename))
    elif tools.check_chroot_user_exists(
            component_data_dict["openssh_username"]) is False:
        # Do not configure. SSH username do not exists.
        msg = """WARNING: SSH access will not be configured because \
the provided username '{u}' do not exists."""
        printer.warning(msg.format(u=component_data_dict["openssh_username"]))
    elif os.path.exists(home_directory) is False:
        # Do not configure. SSH username's home directory do not exists.
        msg = """WARNING: SSH access will not be configured because \
the home directory '{h}' do not exists."""
        printer.warning(msg.format(h=home_directory))
    else:
        msg = "Installing provided SSH public key '{k}' for username '{u}'"
        printer.substep_info(
            msg.format(k=public_key_filename,
                       u=component_data_dict["openssh_username"]))

        # .- create the '$HOME/.ssh' directory and install the key file
        tools.create_directory(ssh_config_path)
        tools.copy_file(ssh_filename, ssh_destination_filename)

        # .- append the key to 'authorized_keys' under a header line
        header = "# --- {f} ---".format(f=public_key_filename)
        tools.add_text_to_file(authorized_keys, header)
        tools.add_text_to_file(authorized_keys, tools.read_file(ssh_filename))

        # .- fix ownership so the target user owns '$HOME/.ssh'
        etc_passwd_values = tools.get_uid_gid_chroot_username(
            component_data_dict["openssh_username"])
        tools.set_numeric_recursive_owner_and_group(ssh_config_path,
                                                    etc_passwd_values["uid"],
                                                    etc_passwd_values["gid"])
def extract_source_code(self):
    """
    Find and extract the source code file for the component.

    Searches 'sources_directory' for a tar file first and a zip file as
    fallback, trying a 'package_name*version*' pattern before a
    'package_name' only pattern whenever 'version' is known. After
    extraction, the extracted directory is located the same way and its
    real path is stored under 'extracted_directory'. Finally,
    'build_directory_path' is set: a fresh build directory is created
    when 'require_build_dir' is True, otherwise the extracted directory
    itself is used.

    Calls 'printer.error' if no source code file or no extracted
    directory can be found.
    """
    def _find(finder, suffix=""):
        # Run 'finder' over 'sources_directory' trying the versioned
        # pattern first (if any), then the name-only pattern.
        # Return '(match, last_pattern_tried)' so the caller can report
        # the failing pattern.
        patterns = []
        if self.component_data_dict["version"] is not None:
            patterns.append(
                "{n}*{v}*{s}".format(n=self.component_data_dict["package_name"],
                                     v=self.component_data_dict["version"],
                                     s=suffix))
        # Name-only pattern is always the last resort
        patterns.append(
            "{n}*{s}".format(n=self.component_data_dict["package_name"],
                             s=suffix))

        result = None
        pattern = None
        for pattern in patterns:
            result = finder(self.component_data_dict["sources_directory"],
                            pattern)
            if result is not None:
                break

        return result, pattern

    # .- look for a tar file, then for a zip file
    source_code_filename, pattern = _find(tools.find_file, suffix=".tar.*")
    if source_code_filename is None:
        source_code_filename, pattern = _find(tools.find_file, suffix=".zip*")

    # Give error if None
    if source_code_filename is None:
        msg = "Can't find source code file for '{n}' with pattern: '{p}'"
        msg = msg.format(n=self.component_data_dict["name"], p=pattern)
        printer.error(msg)

    # Extract
    tools.extract(source_code_filename)

    # .- locate the extracted directory using the same pattern strategy
    extracted_directory, pattern = _find(tools.find_directory)
    self.component_data_dict["extracted_directory"] = extracted_directory

    # Fail if not found 'extracted_directory'
    if extracted_directory is None:
        msg = "Can't find extracted directory for '{n}' with pattern: '{p}'"
        msg = msg.format(n=self.component_data_dict["name"], p=pattern)
        printer.error(msg)
    else:
        # Bug fix: resolve the real path whichever pattern matched.
        # Previously, a directory found with the name-only fallback
        # pattern was stored without 'os.path.realpath'.
        self.component_data_dict["extracted_directory"] = os.path.realpath(
            extracted_directory
        )

    # Generate build_dir if necessary.
    if self.component_data_dict["require_build_dir"] is True:
        # Generate and save 'build_directory' path
        value = os.path.realpath(
            os.path.join(
                self.component_data_dict["extracted_directory"],
                self.build_directory_name
            )
        )
        tools.add_to_dictionary(self.component_data_dict,
                                "build_directory_path",
                                value,
                                concat=False)
        # Create directory
        tools.create_directory(self.component_data_dict["build_directory_path"])
    else:
        # If not, build component into the extracted directory
        tools.add_to_dictionary(self.component_data_dict,
                                "build_directory_path",
                                self.component_data_dict["extracted_directory"],
                                concat=False)

    # Set directory owner if we are building the 'toolchain'
    if self.component_data_dict["builder_name"] == "toolchain":
        tools.set_recursive_owner_and_group(
            self.component_data_dict["extracted_directory"],
            self.component_data_dict["run_as_username"])