def modify_grub_configuration(self):
    template_file = join_path(self.info.data_dir, 'grub.install.cfg')
    template = read_file(template_file)
    rootflags = "rootflags=sync"
    if self.info.distro.name == 'LliureX-Live':
        isopath = unix_path(self.info.iso_path)
        dic = dict(
            custom_installation_dir = unix_path(self.info.custominstall),
            iso_path = isopath,
            keyboard_variant = self.info.keyboard_variant,
            keyboard_layout = self.info.keyboard_layout,
            locale = self.info.locale,
            accessibility = self.info.accessibility,
            kernel = unix_path(self.info.kernel),
            initrd = unix_path(self.info.initrd),
            rootflags = rootflags,
            title1 = "Booting the LliureX installation.",
            title2 = "For more boot options, press `ESC' now...",
            lliurex_mode_title = "LliureX",
            normal_mode_title = "LliureX Live",
            #pae_mode_title = "PAE mode",
            #safe_graphic_mode_title = "Safe graphic mode",
            #intel_graphics_workarounds_title = "Intel graphics workarounds",
            #nvidia_graphics_workarounds_title = "Nvidia graphics workarounds",
            #acpi_workarounds_title = "ACPI workarounds",
            #verbose_mode_title = "Verbose mode",
            #demo_mode_title = "Demo mode",
            )
    else:
        isopath = ""
        kernel = ''
        initrd = ''
        dic = dict(
            lliurex_mode_title = "LliureX",
            title1 = "Booting the LliureX installation.",
            title2 = "For more boot options, press `ESC' now...",
            rootflags = rootflags,
            custom_installation_dir = unix_path(self.info.custominstall),
            iso_path = isopath,
            keyboard_variant = self.info.keyboard_variant,
            keyboard_layout = self.info.keyboard_layout,
            locale = self.info.locale,
            accessibility = self.info.accessibility,
            )
    ## TBD at the moment we are extracting the ISO, not the CD content
    #~ elif self.info.cd_path:
    #~     isopath = unix_path(self.info.cd_path)
    content = template
    for k, v in dic.items():
        k = "$(%s)" % k
        content = content.replace(k, v)
    if self.info.run_task == "cd_boot":
        content = content.replace(" automatic-ubiquity", "")
        content = content.replace(" iso-scan/filename=", "")
    grub_config_file = join_path(self.info.install_boot_dir, "grub", "grub.cfg")
    write_file(grub_config_file, content)
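# --- Illustrative helpers (not part of the original source) ---
# The grub/preseed functions above and below rely on small helpers such as
# join_path, read_file, write_file and unix_path whose definitions are not
# shown here. The following is a minimal sketch of their assumed behaviour:
# join filesystem paths, read/write whole files, and convert a Windows-style
# path into the unix-style path GRUB expects.
import os

def join_path(*paths):
    # Assumed behaviour: plain os.path.join wrapper
    return os.path.join(*paths)

def read_file(path):
    with open(path) as f:
        return f.read()

def write_file(path, content):
    with open(path, 'w') as f:
        f.write(content)

def unix_path(path):
    # Assumed behaviour: flip backslashes and drop the drive letter,
    # e.g. "C:\\ubuntu\\install" -> "/ubuntu/install"
    path = path.replace('\\', '/')
    if len(path) > 1 and path[1] == ':':
        path = path[2:]
    return path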
def modify_grub_configuration(self):
    template_file = join_path(self.info.data_dir, 'grub.install.cfg')
    template = read_file(template_file)
    #if self.info.run_task == "cd_boot":
    #    isopath = ""
    if self.info.iso_path:
        isopath = unix_path(self.info.iso_path)
    if self.info.target_drive.is_fat():
        rootflags = "rootflags=sync"
    else:
        rootflags = "rootflags=syncio"
    if self.info.run_task == "cd_boot":
        title = "StartOS LiveCD"
    elif self.info.flag:
        title = "StartOS"
    else:
        title = "StartOS LiveCD"
    if self.info.run_task == "cd_boot":
        mode = ""
    elif self.info.flag:
        mode = "install-automatic"
    else:
        mode = ""
    dic = dict(
        title1 = "Completing the StartOS installation.",
        title2 = "For more installation boot options, press `ESC' now...",
        normal_mode_title = title,
        kernel = unix_path(self.info.kernel),
        iso_path = isopath,
        install_mode = mode,
        locale = self.info.locale,
        keyboard_layout = self.info.keyboard_layout,
        keyboard_variant = self.info.keyboard_variant,
        rootflags = rootflags,
        initrd = unix_path(self.info.initrd),
        )
    content = template
    for k, v in dic.items():
        k = "$(%s)" % k
        log.debug("%s,%s" % (k, v))
        content = content.replace(k, v)
    #if self.info.run_task == "cd_boot":
    #    content = content.replace(" automatic-ubiquity", "")
    #    content = content.replace(" iso-scan/filename=", "")
    grub_config_file = join_path(self.info.install_boot_dir, "grub", "grub.cfg")
    log.debug("grub_config_file === %s" % grub_config_file)
    write_file(grub_config_file, content)
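# A minimal sketch of how the "$(key)" placeholder substitution used by these
# functions behaves. The grub.install.cfg template is assumed to contain
# entries such as "linux $(kernel) ... $(rootflags)"; each placeholder is
# replaced by plain string substitution, so unknown keys are left untouched.
# render_template and the template text below are hypothetical, for
# illustration only.
def render_template(template, values):
    content = template
    for k, v in values.items():
        content = content.replace("$(%s)" % k, v)
    return content

_example = render_template(
    "menuentry \"$(normal_mode_title)\" {\n"
    "    linux $(kernel) iso-scan/filename=$(iso_path) $(rootflags) quiet splash\n"
    "    initrd $(initrd)\n"
    "}\n",
    {
        "normal_mode_title": "StartOS",
        "kernel": "/startos/install/boot/vmlinuz",
        "iso_path": "/startos/install/installation.iso",
        "rootflags": "rootflags=sync",
        "initrd": "/startos/install/boot/initrd.lz",
    },
)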
def get_objects_to_upload(self, objects):
    """
    get_objects_to_upload(self, objects)

    Given a list of object paths, return a dictionary containing
    metadata of a file or of the files within a directory.
    """
    expanded_objects = []
    objects_metadata = {}

    # Loop through objects to build a list of all files to upload
    for obj in objects:
        # Use the object's full path when building the list of objects
        obj = utils.unix_path(os.path.abspath(obj))

        # Only upload objects within the "shelves" directory
        if not obj.startswith(self.git_path):
            self.logger.error("Object %s is not within %s" % (obj, self.paths['shelves']))
            sys.exit(1)

        # Build list of objects
        if os.path.isfile(obj):
            expanded_objects.append(obj)
        elif os.path.isdir(obj):
            for (root, dirs, files) in os.walk(obj):
                for f in files:
                    obj = os.path.join(root, f)
                    expanded_objects.append(obj)
        else:
            self.logger.warn("Local file '%s' not found" % obj)

    # Process the list of objects to calculate file size, modified time and hash
    objects_metadata = self.process_objects(expanded_objects)

    return objects_metadata
def get_objects_on_disk(self):
    """
    get_objects_on_disk(self)

    Walk through local storage and build one giant dictionary of
    objects on disk.
    """
    objects_on_disk = {}
    download_path = self.options['dest_sync']

    if 'shelf' in self.options:
        download_path = os.path.join(download_path, self.options['shelf'])

    for (root, dirs, files) in os.walk(download_path):
        for f in files:
            obj = os.path.join(root, f)
            object_name = utils.unix_path(
                os.path.relpath(obj, self.options['dest_sync']))

            # Record the sha1 hash if checksum mode is enabled
            if self.options['checksum']:
                objects_on_disk.update(
                    {object_name: {
                        'sha1_hash': utils.get_sha1(obj),
                    }})
            else:
                objects_on_disk.update({
                    object_name: {
                        'modified_time': utils.get_modified_time(obj),
                        'file_size': utils.get_file_size(obj)
                    }
                })

    return objects_on_disk
def get_objects_on_disk(self):
    """
    get_objects_on_disk(self)

    Walk through local storage and build one giant dictionary of
    objects on disk.
    """
    objects_on_disk = {}
    download_path = self.options['dest_sync']

    if 'shelf' in self.options:
        download_path = os.path.join(download_path, self.options['shelf'])

    for (root, dirs, files) in os.walk(download_path):
        for f in files:
            obj = os.path.join(root, f)
            object_name = utils.unix_path(
                os.path.relpath(obj, self.options['dest_sync'])
            )

            # Record the sha1 hash if checksum mode is enabled
            if self.options['checksum']:
                objects_on_disk.update({object_name: {
                    'sha1_hash': utils.get_sha1(obj),
                }})
            else:
                objects_on_disk.update({object_name: {
                    'modified_time': utils.get_modified_time(obj),
                    'file_size': utils.get_file_size(obj)
                }})

    return objects_on_disk
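# The shelf/storage functions above and below call several helpers from a
# "utils" module that is not shown here. A minimal sketch of their assumed
# behaviour, included only for illustration:
import hashlib
import os

def get_sha1(path, chunk_size=65536):
    # Stream the file so large objects do not need to fit in memory
    sha1 = hashlib.sha1()
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(chunk_size), b''):
            sha1.update(chunk)
    return sha1.hexdigest()

def get_modified_time(path):
    # Assumed to return the file's mtime; the real helper may round or format it
    return int(os.path.getmtime(path))

def get_file_size(path):
    return os.path.getsize(path)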
def create_preseed(self):
    template_file = join_path(self.info.data_dir, "preseed." + self.info.distro.name)
    if not os.path.exists(template_file):
        template_file = join_path(self.info.data_dir, "preseed.lupin")
    template = read_file(template_file)

    partitioning = ""
    partitioning += "d-i partman-auto/disk string LIDISK\n"
    partitioning += "d-i partman-auto/method string loop\n"
    partitioning += "d-i partman-auto-loop/partition string LIPARTITION\n"
    partitioning += "d-i partman-auto-loop/recipe string \\\n"
    disks_dir = unix_path(self.info.disks_dir) + "/"
    if self.info.root_size_mb:
        partitioning += (
            " %s 3000 %s %s $default_filesystem method{ format } format{ } use_filesystem{ } $default_filesystem{ } mountpoint{ / } . \\\n"
            % (disks_dir + "root.disk", self.info.root_size_mb, self.info.root_size_mb)
        )
    if self.info.swap_size_mb:
        partitioning += " %s 100 %s %s linux-swap method{ swap } format{ } . \\\n" % (
            disks_dir + "swap.disk",
            self.info.swap_size_mb,
            self.info.swap_size_mb,
        )
    if self.info.home_size_mb:
        partitioning += (
            " %s 100 %s %s $default_filesystem method{ format } format{ } use_filesystem{ } $default_filesystem{ } mountpoint{ /home } . \\\n"
            % (disks_dir + "home.disk", self.info.home_size_mb, self.info.home_size_mb)
        )
    if self.info.usr_size_mb:
        partitioning += (
            " %s 100 %s %s $default_filesystem method{ format } format{ } use_filesystem{ } $default_filesystem{ } mountpoint{ /usr } . \\\n"
            % (disks_dir + "usr.disk", self.info.usr_size_mb, self.info.usr_size_mb)
        )
    partitioning += "\n"

    safe_host_username = self.info.host_username.replace(" ", "+")
    user_directory = self.info.user_directory.replace("\\", "/")[2:]
    host_os_name = "Windows XP Professional"  # TBD
    password = md5_password(self.info.password)
    dic = dict(
        timezone=self.info.timezone,
        password=password,
        user_full_name=self.info.user_full_name,
        distro_packages=self.info.distro.packages,
        host_username=self.info.host_username,
        username=self.info.username,
        partitioning=partitioning,
        user_directory=user_directory,
        safe_host_username=safe_host_username,
        host_os_name=host_os_name,
    )

    content = template
    for k, v in dic.items():
        k = "$(%s)" % k
        content = content.replace(k, v)
    preseed_file = join_path(self.info.custominstall, "preseed.cfg")
    write_file(preseed_file, content)
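# For illustration: with root_size_mb=10000, swap_size_mb=1024 and no separate
# /home or /usr disks, create_preseed builds a partman-auto-loop recipe along
# these lines (paths and sizes are hypothetical examples, not taken from a
# real run):
#
#   d-i partman-auto/disk string LIDISK
#   d-i partman-auto/method string loop
#   d-i partman-auto-loop/partition string LIPARTITION
#   d-i partman-auto-loop/recipe string \
#    /ubuntu/disks/root.disk 3000 10000 10000 $default_filesystem method{ format } format{ } use_filesystem{ } $default_filesystem{ } mountpoint{ / } . \
#    /ubuntu/disks/swap.disk 100 1024 1024 linux-swap method{ swap } format{ } . \
#
# which is then substituted for the $(partitioning) placeholder in the preseed
# template.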
def modify_grub_configuration(self):
    template_file = join_path(self.info.data_dir, 'grub.install.cfg')
    template = read_file(template_file)
    if self.info.run_task == "cd_boot":
        isopath = ""
    ## TBD at the moment we are extracting the ISO, not the CD content
    #~ elif self.info.cd_path:
    #~     isopath = unix_path(self.info.cd_path)
    elif self.info.iso_path:
        isopath = unix_path(self.info.iso_path)
    if self.info.target_drive.is_fat():
        rootflags = "rootflags=sync"
    else:
        rootflags = "rootflags=syncio"
    dic = dict(
        custom_installation_dir = unix_path(self.info.custominstall),
        iso_path = isopath,
        keyboard_variant = self.info.keyboard_variant,
        keyboard_layout = self.info.keyboard_layout,
        locale = self.info.locale,
        accessibility = self.info.accessibility,
        kernel = unix_path(self.info.kernel),
        initrd = unix_path(self.info.initrd),
        rootflags = rootflags,
        title1 = "Completing the Linux Mint installation.",
        title2 = "For more installation boot options, press `ESC' now...",
        normal_mode_title = "Normal mode",
        safe_graphic_mode_title = "Safe graphic mode",
        acpi_workarounds_title = "ACPI workarounds",
        verbose_mode_title = "Verbose mode",
        demo_mode_title = "Demo mode",
        )
    content = template
    for k, v in dic.items():
        k = "$(%s)" % k
        content = content.replace(k, v)
    if self.info.run_task == "cd_boot":
        content = content.replace(" automatic-ubiquity", "")
        content = content.replace(" iso-scan/filename=", "")
    grub_config_file = join_path(self.info.install_boot_dir, "grub", "grub.cfg")
    write_file(grub_config_file, content)
def create_preseed(self):
    template_file = join_path(self.info.data_dir, 'preseed.' + self.info.distro.name)
    if not os.path.exists(template_file):
        template_file = join_path(self.info.data_dir, 'preseed.lupin')
    template = read_file(template_file)
    if self.info.distro.packages:
        distro_packages_skip = ''
    else:
        distro_packages_skip = '#'
    partitioning = ""
    partitioning += "d-i partman-auto/disk string LIDISK\n"
    partitioning += "d-i partman-auto/method string loop\n"
    partitioning += "d-i partman-auto-loop/partition string LIPARTITION\n"
    partitioning += "d-i partman-auto-loop/recipe string \\\n"
    disks_dir = unix_path(self.info.disks_dir) + '/'
    if self.info.root_size_mb:
        partitioning += ' %s 3000 %s %s $default_filesystem method{ format } format{ } use_filesystem{ } $default_filesystem{ } mountpoint{ / } . \\\n' \
            % (disks_dir + 'root.disk', self.info.root_size_mb, self.info.root_size_mb)
    if self.info.swap_size_mb:
        partitioning += ' %s 100 %s %s linux-swap method{ swap } format{ } . \\\n' \
            % (disks_dir + 'swap.disk', self.info.swap_size_mb, self.info.swap_size_mb)
    if self.info.home_size_mb:
        partitioning += ' %s 100 %s %s $default_filesystem method{ format } format{ } use_filesystem{ } $default_filesystem{ } mountpoint{ /home } . \\\n' \
            % (disks_dir + 'home.disk', self.info.home_size_mb, self.info.home_size_mb)
    if self.info.usr_size_mb:
        partitioning += ' %s 100 %s %s $default_filesystem method{ format } format{ } use_filesystem{ } $default_filesystem{ } mountpoint{ /usr } . \\\n' \
            % (disks_dir + 'usr.disk', self.info.usr_size_mb, self.info.usr_size_mb)
    partitioning += "\n"

    safe_host_username = self.info.host_username.replace(" ", "+")
    user_directory = self.info.user_directory.replace("\\", "/")[2:]
    host_os_name = "Windows XP Professional"  # TBD
    password = md5_password(self.info.password)
    dic = dict(
        timezone=self.info.timezone,
        password=password,
        user_full_name=self.info.user_full_name,
        distro_packages_skip=distro_packages_skip,
        distro_packages=self.info.distro.packages,
        host_username=self.info.host_username,
        username=self.info.username,
        partitioning=partitioning,
        user_directory=user_directory,
        safe_host_username=safe_host_username,
        host_os_name=host_os_name,
    )
    content = template
    for k, v in dic.items():
        k = "$(%s)" % k
        content = content.replace(k, v)
    preseed_file = join_path(self.info.custominstall, "preseed.cfg")
    write_file(preseed_file, content)
def modify_grub_configuration(self):
    template_file = join_path(self.info.data_dir, 'grub.install.cfg')
    template = read_file(template_file)
    if self.info.run_task == "cd_boot":
        isopath = ""
    ## TBD at the moment we are extracting the ISO, not the CD content
    #~ elif self.info.cd_path:
    #~     isopath = unix_path(self.info.cd_path)
    elif self.info.iso_path:
        isopath = unix_path(self.info.iso_path)
    rootflags = "rootflags=sync"
    dic = dict(
        custom_installation_dir=unix_path(self.info.custominstall),
        iso_path=isopath,
        keyboard_variant=self.info.keyboard_variant,
        keyboard_layout=self.info.keyboard_layout,
        locale=self.info.locale,
        accessibility=self.info.accessibility,
        kernel=unix_path(self.info.kernel),
        initrd=unix_path(self.info.initrd),
        rootflags=rootflags,
        title1="Completing the Ubuntu installation.",
        title2="For more installation boot options, press `ESC' now...",
        normal_mode_title="Normal mode",
        safe_graphic_mode_title="Safe graphic mode",
        acpi_workarounds_title="ACPI workarounds",
        verbose_mode_title="Verbose mode",
        demo_mode_title="Demo mode",
    )
    content = template
    for k, v in dic.items():
        k = "$(%s)" % k
        content = content.replace(k, v)
    if self.info.run_task == "cd_boot":
        content = content.replace(" automatic-ubiquity", "")
        content = content.replace(" iso-scan/filename=", "")
    grub_config_file = join_path(self.info.install_boot_dir, "grub", "grub.cfg")
    write_file(grub_config_file, content)
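# For illustration: when run_task == "cd_boot", isopath is empty and the two
# replace() calls above strip the ISO-specific options from the rendered
# kernel command line. With a hypothetical template line such as
#
#   linux /casper/vmlinuz boot=casper iso-scan/filename=$(iso_path) automatic-ubiquity quiet splash
#
# the rendered "... iso-scan/filename= automatic-ubiquity quiet splash" becomes
#
#   linux /casper/vmlinuz boot=casper quiet splash
#
# after the substitutions, so the CD boot entry neither scans for an ISO nor
# starts the automatic ubiquity installer.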
def get_objects_to_upload(self, objects):
    """
    get_objects_to_upload(self, objects)

    Given a list of object paths, return a dictionary containing
    metadata of a file or of the files within a directory.
    """
    expanded_objects = []
    objects_metadata = {}

    # Loop through objects to build a list of all files to upload
    for obj in objects:
        # Use the object's full path when building the list of objects
        obj = utils.unix_path(os.path.abspath(obj))

        # Only upload objects within the "shelves" directory
        if not obj.startswith(self.git_path):
            self.logger.error(
                "Object %s is not within %s" % (obj, self.paths['shelves'])
            )
            sys.exit(1)

        # Build list of objects
        if os.path.isfile(obj):
            expanded_objects.append(obj)
        elif os.path.isdir(obj):
            for (root, dirs, files) in os.walk(obj):
                for f in files:
                    obj = os.path.join(root, f)
                    expanded_objects.append(obj)
        else:
            self.logger.warn("Local file '%s' not found" % obj)

    # Process the list of objects to calculate file size, modified time and hash
    objects_metadata = self.process_objects(expanded_objects)

    return objects_metadata
def process_objects(self, expanded_objects=[]):
    """
    process_objects(expanded_objects)

    Given a list of objects, determine whether each is uploadable (binary)
    and then build a dictionary of:
        sha1_hash
        sha256_hash
        modified_time
        file_size

    sha1_hash is only computed on first upload or if the modified time
    and file size have changed.
    """
    objects_metadata = {}
    for obj in expanded_objects:
        # Process only if the object is uploadable
        if self.uploadable_object(obj):
            # Object name in the metadata file. Replace \\ with / to remain
            # consistent across platforms
            object_name = utils.unix_path(
                os.path.relpath(obj, self.paths['shelves']))

            # Determine paths
            object_path = os.path.abspath(obj)
            object_metadata_file = '%s.pitem' % object_path

            # Add object to gitignore
            self.add_object_to_gitignore(obj)

            object_mtime = utils.get_modified_time(obj)
            object_file_size = utils.get_file_size(obj)

            # Use the cached checksum, since checksum hashing is CPU intensive
            # while file size and modified time checks are quick. Checksums are
            # forced using the cli flag --checksum.
            if (not self.options['checksum']
                    and os.path.exists(object_metadata_file)):
                with open(object_metadata_file) as json_file:
                    cached_metadata = json.load(json_file)

                # Use the cached hashes if file size and mtime are unchanged
                if (object_file_size == cached_metadata[object_name]['file_size']
                        and object_mtime == cached_metadata[object_name]['modified_time']):
                    object_sha1_hash = cached_metadata[object_name]['sha1_hash']
                    if 'sha256_hash' in cached_metadata[object_name]:
                        object_sha256_hash = cached_metadata[object_name]['sha256_hash']
                    else:
                        object_sha256_hash = utils.get_sha256(obj)
                else:
                    object_sha1_hash = utils.get_sha1(obj)
                    object_sha256_hash = utils.get_sha256(obj)
            else:
                # Generate hashes if cached metadata is not present
                object_sha1_hash = utils.get_sha1(obj)
                object_sha256_hash = utils.get_sha256(obj)

            # TODO remove sha1 check as it is not needed.
            # Add object to the metadata dictionary
            objects_metadata[object_name] = {
                'sha1_hash': object_sha1_hash,
                'sha256_hash': object_sha256_hash,
                'modified_time': object_mtime,
                'file_size': object_file_size,
            }

    return objects_metadata
def process_objects(self, expanded_objects=[]):
    """
    process_objects(expanded_objects)

    Given a list of objects, determine whether each is uploadable (binary)
    and then build a dictionary of:
        sha1_hash
        sha256_hash
        modified_time
        file_size

    sha1_hash is only computed on first upload or if the modified time
    and file size have changed.
    """
    objects_metadata = {}
    for obj in expanded_objects:
        # Process only if the object is uploadable
        if self.uploadable_object(obj):
            # Object name in the metadata file. Replace \\ with / to remain
            # consistent across platforms
            object_name = utils.unix_path(
                os.path.relpath(obj, self.paths['shelves'])
            )

            # Determine paths
            object_path = os.path.abspath(obj)
            object_metadata_file = '%s.pitem' % object_path

            # Add object to gitignore
            self.add_object_to_gitignore(obj)

            object_mtime = utils.get_modified_time(obj)
            object_file_size = utils.get_file_size(obj)

            # Use the cached checksum, since checksum hashing is CPU intensive
            # while file size and modified time checks are quick. Checksums are
            # forced using the cli flag --checksum.
            if (
                not self.options['checksum']
                and os.path.exists(object_metadata_file)
            ):
                with open(object_metadata_file) as json_file:
                    cached_metadata = json.load(json_file)

                # Use the cached hashes if file size and mtime are unchanged
                if (
                    object_file_size == cached_metadata[object_name]['file_size']
                    and object_mtime == cached_metadata[object_name]['modified_time']
                ):
                    object_sha1_hash = cached_metadata[object_name]['sha1_hash']
                    if 'sha256_hash' in cached_metadata[object_name]:
                        object_sha256_hash = cached_metadata[object_name]['sha256_hash']
                    else:
                        object_sha256_hash = utils.get_sha256(obj)
                else:
                    object_sha1_hash = utils.get_sha1(obj)
                    object_sha256_hash = utils.get_sha256(obj)
            else:
                # Generate hashes if cached metadata is not present
                object_sha1_hash = utils.get_sha1(obj)
                object_sha256_hash = utils.get_sha256(obj)

            # TODO remove sha1 check as it is not needed.
            # Add object to the metadata dictionary
            objects_metadata[object_name] = {
                'sha1_hash': object_sha1_hash,
                'sha256_hash': object_sha256_hash,
                'modified_time': object_mtime,
                'file_size': object_file_size,
            }

    return objects_metadata
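# For illustration: process_objects expects each cached "<object>.pitem" file
# to be JSON keyed by the object's shelf-relative name. A hypothetical example
# of the structure it reads back (names and values are made up):
#
#   {
#       "shelf-name/bin/tool.exe": {
#           "sha1_hash": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
#           "sha256_hash": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
#           "modified_time": 1454425072,
#           "file_size": 1048576
#       }
#   }
#
# The hashes shown are the well-known SHA-1/SHA-256 digests of empty input,
# used here only as placeholders.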