def collect_default_projects(single_image, workspace_file, inputpath):
    """ Collect the default project dependencies and combine them into a
    single image.
    """
    # Build and collect the input xuv files from the default project in the workspace
    workspace = Workspace(
        workspace_file,
        PrepareForImage(single_image=single_image, inputpath=inputpath))
    workspace.build_default_project()
def collect_all_workspace_projects(single_image, workspace_file, inputpath):
    """ Collect all workspace projects and combine them into a single image.
    Build and collect the input xuv files from all the projects in the
    workspace.
    """
    workspace = Workspace(
        workspace_file,
        PrepareForImage(single_image=single_image, inputpath=inputpath))
    workspace.build()
def _search_projects_for_flash_config_file(self):
    """ Look for a flash configuration file in the default configuration of
    the app/p1 project file.
    """
    ws_projects = Workspace(self._workspace_file).parse()
    # Go through the projects in the x2w file
    for project in ws_projects.values():
        # Try and get the flash configuration file from the project
        flash_config_file = self._search_project_for_flash_config_file(project.filename)
        if flash_config_file:
            # The flash configuration file has been found so return it
            return flash_config_file
    # The flash configuration file has not been found in the projects
    return None
def search_projects_for_chip_type(devkit_root, workspace_file):
    """ Look for what memory type options are available. """
    ws_projects = Workspace(workspace_file).parse()
    # Go through the projects in the x2w file and find the device configuration
    # filesystem project
    for project in ws_projects.values():
        # Only process the project whose default configuration TYPE is 'device_config'
        if project.default_configuration.properties.get(
                'TYPE') == 'device_config':
            # Try and get the chip type from the project
            return search_project_for_chip_type(devkit_root, project, workspace_file)
    return None
def search_projects_for_chip_type(devkit_root, workspace_file):
    """ Look for a chip type in the default configuration of the app/p1
    project file.
    """
    ws_projects = Workspace(workspace_file).parse()
    # Go through the projects in the x2w file
    for project in ws_projects.values():
        # Try and get the chip type from the project
        chip_type = \
            search_project_for_chip_type(devkit_root, project.filename,
                                         workspace_file)
        if chip_type:
            # The chip type has been found so return it
            return chip_type
    # The chip type has not been found in the projects
    return None
def get_crypto_key(script_args):
    """ Search all projects in the workspace to find where the crypto key is
    and extract it.
    :param script_args:
    :return: the crypto key needed to encrypt the PS storage
    """
    crypto_key = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
    # Get the project paths from the workspace because we need to be able
    # to find information from other projects.
    # Get the list of the workspace proj_path
    ws_projects = Workspace(script_args.workspace_file).parse()
    # Go through each project, find the filesystem projects, look for *.htf
    # files under the different filesystems and scan them to find the
    # encryption key "PsAesKey"
    for proj in ws_projects.values():
        config = proj.get_properties('filesystem')
        filesystem_type = config.get('TYPE')
        if filesystem_type in ("firmware_config", "curator_config",
                               "device_config", "user_ps"):
            project_files = proj.files
            htf_files = [
                f for f in project_files
                if os.path.splitext(f.lower())[1] == '.htf'
            ]
            for file in htf_files:
                with open(file, 'r') as htf_file:
                    file_content = htf_file.read().splitlines()
                    for i in range(len(file_content)):
                        # Do not consider the key or anything else which is commented out
                        file_content[i] = re.sub("#.*$", "", file_content[i])
                        if "PsAesKey" in file_content[i]:
                            # PsAesKey = [ 00 2f 00 80 00 00 00 00 00 00 00 00 00 00 00 10]
                            # after splitting:
                            # ['PsAesKey ', ' [ 00 2f 00 80 00 00 00 00 00 00 00 00 00 00 00 10]']
                            crypto_key = file_content[i].split("=")[-1:]
                            # Remove "[", "]" and any extra spaces
                            crypto_key = crypto_key[0].replace(
                                "[", "").replace("]", "").replace(" ", "")
                            # Create 16 elements, each one octet long; this is what P0 expects
                            # e.g. [0, 47, 0, 128, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 16]
                            crypto_key = [
                                int(crypto_key[j:j + 2], 16)
                                for j in range(0, len(crypto_key), 2)
                            ]
    return crypto_key
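# For reference, a minimal standalone sketch of the key-line parsing used by
# get_crypto_key() above. The htf line format is taken from the comment in
# that function; the helper name parse_ps_aes_key_line is illustrative only
# and is not part of the build tools.
def parse_ps_aes_key_line(line):
    """Turn one 'PsAesKey = [ .. ]' htf line into a list of 16 integers,
    mirroring the parsing in get_crypto_key (assumes re is imported at
    module level, as elsewhere in this file)."""
    # Strip any trailing comment, as get_crypto_key does
    line = re.sub("#.*$", "", line)
    hex_text = line.split("=")[-1]
    hex_text = hex_text.replace("[", "").replace("]", "").replace(" ", "")
    return [int(hex_text[i:i + 2], 16) for i in range(0, len(hex_text), 2)]

# Example, using the value quoted in the comment above:
#   parse_ps_aes_key_line(
#       "PsAesKey = [ 00 2f 00 80 00 00 00 00 00 00 00 00 00 00 00 10]")
#   -> [0, 47, 0, 128, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 16]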
def collect_all_workspace_projects(single_image, workspace_file, inputpath):
    """ Collect all workspace projects and combine them into a single image.
    Build and collect the input xuv files from all the projects in the
    workspace.
    """
    workspace = Workspace(workspace_file).parse()
    for project in workspace.values():
        if not single_image.process_project(project.filename, inputpath):
            # An error has occurred
            print("Failed to process %s" % project)
            sys.stdout.flush()
            return False
    sys.stdout.flush()
    return True
def search_projects_for_chip_type(devkit_root, workspace_file):
    """ Look for what memory type options are available. """
    ws_projects = Workspace(workspace_file).parse()
    # Go through the projects in the x2w file and find dev_cfg_filesystem
    for project in ws_projects.values():
        # Only going to work on project 'dev_cfg_filesystem'
        if "dev_cfg_filesystem" == project.name:
            # Try and get the memory options from the project
            memory_options_list = \
                search_project_for_chip_type(devkit_root, project.filename,
                                             workspace_file)
            if memory_options_list:
                # The memory options have been found so return them
                return memory_options_list
    # The memory options have not been found in the projects
    return None
def collect_all_workspace_projects(self, workspace, inputpath,
                                   build_output_folder=None):
    """ Collect all workspace projects and combine them into a single image. """
    # Build and collect the input xuv files from all the projects in the workspace
    ws_projects = Workspace(workspace).parse()
    for project in ws_projects.values():
        proceed, _ = self.process_project(project.filename, inputpath)
        if not proceed:
            # An error has occurred
            print("Failed to process %s" % project.filename)
            sys.stdout.flush()
            return False
    sys.stdout.flush()
    return True
def get_projects_from_workspace(workspace_file):
    """ Given a workspace file (x2w), return a dictionary mapping the apps0,
    main (apps1), audio firmware and kymera_audio projects contained within
    it to their project file paths.
    """
    project_dict = {}
    workspace_name = os.path.basename(workspace_file)
    main_proj_name, _ = os.path.splitext(workspace_name)
    main_proj_name = main_proj_name + ".x2p"
    ws_projects = Workspace(workspace_file).parse()
    for child in ws_projects.keys():
        project = ws_projects[child].filename
        if "apps0_firmware.x2p" in project:
            project_dict["apps0"] = project
        if "audio_firmware.x2p" in project:
            project_dict["audio_image"] = project
        if "kymera_audio.x2p" in project:
            project_dict["audio_package"] = project
        if main_proj_name in project:
            project_dict["apps1"] = project
    return project_dict
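# A minimal, self-contained sketch of the name-matching rule used by
# get_projects_from_workspace() above, so the mapping can be exercised without
# a real Workspace. The helper name and the example filenames are hypothetical
# and for illustration only (assumes the module-level os import used above).
def _classify_project_filenames(workspace_file, project_filenames):
    project_dict = {}
    # The main (apps1) project is named after the workspace file itself
    main_proj_name = os.path.splitext(os.path.basename(workspace_file))[0] + ".x2p"
    for project in project_filenames:
        if "apps0_firmware.x2p" in project:
            project_dict["apps0"] = project
        if "audio_firmware.x2p" in project:
            project_dict["audio_image"] = project
        if "kymera_audio.x2p" in project:
            project_dict["audio_package"] = project
        if main_proj_name in project:
            project_dict["apps1"] = project
    return project_dict

# e.g. _classify_project_filenames(
#          "earbud.x2w",
#          ["apps0_firmware.x2p", "earbud.x2p", "kymera_audio.x2p"])
# -> {"apps0": "apps0_firmware.x2p", "apps1": "earbud.x2p",
#     "audio_package": "kymera_audio.x2p"}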
def zip_it(root_dir, workspace, out_dir, base_name):
    """ Archive the xcd file and any elf and lst files found.
    - root_dir: the root directory of the devkit
    - workspace: the workspace whose project locations are searched for elf
      and lst files
    - out_dir: the output directory containing the xcd and log files, and in
      which the zip file containing those and any elf and lst files found is
      created
    - base_name: the base string used for the name of the xcd, log and zip
      files
    """
    zip_filename = base_name + ".zip"
    zip_filespec = os.path.join(out_dir, zip_filename)
    # Create the archive for writing "deflated" (compressed) files to
    try:
        with zipfile.ZipFile(zip_filespec, mode="w",
                             compression=zipfile.ZIP_DEFLATED) as zip:
            # So put the xcd file in it...
            xcd_arcname = base_name + ".xcd"
            xcd_filespec = os.path.join(out_dir, xcd_arcname)
            assert os.path.isfile(xcd_filespec)
            print("Adding {} to archive {}\n".format(xcd_arcname, zip_filespec))
            zip.write(xcd_filespec, xcd_arcname)

            # ... and put the log file in it ...
            log_arcname = base_name + ".log"
            log_filespec = os.path.join(out_dir, log_arcname)
            if os.path.isfile(log_filespec):
                print("Adding {} to archive {}\n".format(
                    log_arcname, zip_filespec))
                zip.write(log_filespec, log_arcname)
            else:
                print("{} not found\n".format(log_filespec))

            # ... and put any *.elf/*.lst files found in the workspace in it
            print("Looking in {} project locations for *.elf and *.lst files\n".
                  format(workspace))
            sys.stdout.flush()
            ws_projects = Workspace(workspace).parse()
            for proj in ws_projects.keys():
                project = ws_projects[proj].filename
                import maker.parse_proj_file as pproj
                proj_parser = pproj.Project(project, root_dir, workspace)
                print("Processing project %s\n" % project)
                elffile, lstfile = process_project(project, proj_parser)
                if elffile is not None:
                    _, elffilename = os.path.split(elffile)
                    print("Adding %s to archive %s\n" % (elffile, zip_filespec))
                    sys.stdout.flush()
                    zip.write(elffile, elffilename)
                    if lstfile is not None:
                        _, lstfilename = os.path.split(lstfile)
                        print("Adding %s to archive %s\n" % (lstfile, zip_filespec))
                        sys.stdout.flush()
                        zip.write(lstfile, lstfilename)
                else:
                    print("No elf file found for this project\n")

        print("Please send the {} file to your".format(zip_filespec))
        print("Qualcomm Technologies International, Ltd. representative.\n")
    except OSError as exception:
        print("Error with zip file {}; error {}, {}\n"
              .format(zip_filespec, exception.errno,
                      os.strerror(exception.errno)))
    sys.stdout.flush()
def build_filesystem(proj, devkit, script_args, crypto_key, build_output_folder):
    """ Build any of the possible flavours of the filesystem project. """
    devkit_root = devkit.root
    register_pylib_path(devkit_root)

    working_dir = proj.dirname
    workspace_file = script_args.workspace_file

    # The option to use NvsCmd for deploying is passed in the Ubuild --special option.
    # If using NvsCmd to deploy, the filesystems must be built using the correct
    # endian format.
    appsFs = False
    try:
        if use_nvscmd(script_args):
            appsFs = True
    except AttributeError:
        appsFs = False

    project_files = proj.files
    tool_root = devkit.root
    config = proj.get_properties('filesystem')
    output_filename = config.get('OUTPUT', None)
    try:
        filesystem_type = config['TYPE']
    except KeyError as excep:
        print("ERROR! Build Setting {} missing in project. {}".format(excep, proj.name))
        return False

    if filesystem_type == "curator_config":
        # Curator config filesystem should not use the appsFs argument
        appsFs = False

    if not output_filename:
        output_filename = filesystem_type + "_filesystem"

    if appsFs:
        # This is a build for an apps filesystem that is needed in the form
        # required by DFU, rather than in the form required for deploy
        output_filename = output_filename + "_dfu"

    if build_output_folder is not None:
        xuv_path = os.path.join(build_output_folder, output_filename + '.xuv')
    else:
        xuv_path = os.path.join(working_dir, output_filename + '.xuv')

    def __get_hydracode_sdbfile_path(proj):
        """ See if the project has specified a specific hydracore sdb file to
        use. If so, there will be a property HYDRACORE_CONFIG_SDB_FILE defined
        in the project.x2p file, set to the path to use. Examples:

        <property name="HYDRACORE_CONFIG_SDB_FILE">sdk://tools/config/hydracore_config_ALTERNATIVE1.sdb</property>
        <property name="HYDRACORE_CONFIG_SDB_FILE">../../MY_hydracore_config.sdb</property>
        <property name="HYDRACORE_CONFIG_SDB_FILE">C:\TEST_CONFIGS\hydracore_config.sdb</property>

        If this field is defined then that is what is used, and checks are
        made to ensure it is present.

        If the field is NOT defined or empty then walk up from the project
        file and search for the 'adk' folder, then use:
        adk/bin/<chip_name>/hydracore_config.sdb
        """
        sdb_file = None
        config = proj.get_properties('filesystem')
        attribs = {
            'type': 'warning',
            'config': 'filesystem',
            'core': 'apps/p1',
            'module': 'build'
        }
        sdb_file_override = config.get('HYDRACORE_CONFIG_SDB_FILE')
        # Check to see if a project override exists
        if sdb_file_override is not None and len(sdb_file_override) > 0:
            if os.path.isabs(sdb_file_override):
                sdb_override_full_path = sdb_file_override
            else:
                sdb_override_full_path = os.path.realpath(
                    os.path.join(proj.dirname, sdb_file_override))
            if not os.path.isfile(sdb_override_full_path):
                msg = ["WARNING - Can not find HYDRACORE_CONFIG_SDB_FILE defined file = {}".format(sdb_override_full_path)]
                if sdb_file_override != sdb_override_full_path:
                    msg += [
                        "Property HYDRACORE_CONFIG_SDB_FILE is defined as = {}".format(sdb_file_override),
                        "Default to looking for the SDB file in the current device's bin folder"
                    ]
                bdex.log_buildproc_output('buildoutput', attribs, "\n".join(msg))
            else:
                sdb_file = sdb_override_full_path

        if not sdb_file:
            for current_dir, contents in walk_up(proj.dirname):
                if os.path.basename(current_dir) == 'adk':
                    if 'bin' in contents:
                        try:
                            sdb_file = glob.glob(os.path.join(
                                current_dir, 'bin', config['CHIP_TYPE'],
                                '*.sdb'))[0]
                        except IndexError:
                            sdb_file = None
                    else:
                        bdex.raise_bd_err('INVALID_CONFIG',
                                          "Can not find bin folder for this branch: {}".format(current_dir))

        if not sdb_file or not os.path.isfile(sdb_file):
            bdex.raise_bd_err('INVALID_CONFIG',
                              "Can not find a suitable HYDRACORE_CONFIG_SDB_FILE")

        return sdb_file

    def get_ps_store_size():
        ''' The ps store size is calculated as 1/2 of the rw_config size '''
        from prepare_single_image import PrepareSingleImage
        prepare_image = PrepareSingleImage(devkit_root, script_args.workspace_file, None)
        flash_config = prepare_image.flash_config
        rw_config_size = 0
        for section, attrs in flash_config.get("layout", None):
            if section == "rw_config":
                rw_config_size = attrs.get("capacity", 0)
        if rw_config_size == 0:
            raise Exception('Flash config layout must contain a valid rw_config section')
        flash_device = flash_config.get('flash_device', None)
        if not flash_device:
            raise Exception('flash config must contain a flash_device section')
        block_size = flash_device.get('block_size', 0)
        if ((rw_config_size / block_size) % 2) != 0:
            raise Exception('rw_config size must be an even number of blocks')
        return rw_config_size // 2

    def gather_files(proj, sdb_system_name, image_directory):
        """ Helper function that grabs all htfs and compiles them into
        image_directory, and copies all hcfs into the same directory """
        supported_fs_ext = [".hcf", ".dkcs"]
        sdb_file = __get_hydracode_sdbfile_path(proj)
        if sdb_file is None:
            return False
        print('SDB File: {}'.format(sdb_file))
        print('tool_root = {}'.format(tool_root))
        config_command = os.path.join(tool_root, "tools", "bin", "ConfigCmd.exe")
        for cfg_file in project_files:
            if os.path.splitext(cfg_file)[1] == ".htf":
                cmd_line = [config_command, "-noprefix", "binary", cfg_file,
                            image_directory, "-system", sdb_system_name,
                            "-quiet", "-database", sdb_file]
                if not launch_simple_cmd(cmd_line):
                    print("ConfigCmd failed: invoked as '%s'" % " ".join(cmd_line))
                    return False
            elif os.path.splitext(cfg_file)[1] in supported_fs_ext:
                # Precompiled - just copy into place
                shutil.copy(cfg_file, image_directory)
        return True

    if filesystem_type in ("firmware_config", "curator_config", "device_config"):
        # Grab the firmware htfs and the sdbs from the image projects and run
        # configcmd to produce the contents of a local images directory. Then
        # run packfile on it.
        sdb_system_name = config["system_label"]
        image_directory = tempfile.mkdtemp()
        if not gather_files(proj, sdb_system_name, image_directory):
            return False
        _run_prepare_fs(tool_root, image_directory, xuv_path, appsFs=appsFs)

        def make_writeable(func, path, exc_info):
            if func is os.remove:
                os.chmod(path, 0o640)
                func(path)

        shutil.rmtree(image_directory, onerror=make_writeable)

    elif filesystem_type == "user_ps":
        # Grab the user ps htf and convert it to XUV using the psflash_converter
        # module
        htf_files = [f for f in project_files
                     if os.path.splitext(f.lower())[1] == '.htf']
        if len(htf_files) > 0:
            print("Building user key persistent store image")
            ps_store_size = get_ps_store_size()
            from csr.dev.fw.psflash_converter import PsflashConverter
            try:
                psfs = PsflashConverter(crypto_key, stores_offset=0,
                                        store_size=ps_store_size)
            except TypeError:
                # TODO: Older API needed a default crypto_key to be passed to
                # PsflashConverter. This can be removed once all builds use
                # the PsflashConverter implementation which comes with its own
                # default.
                crypto_key = (0, 0, 0, 0)
                psfs = PsflashConverter(crypto_key, stores_offset=0,
                                        store_size=ps_store_size)
            # We also need to push the PS keys into the SQIF
            # 1. Load the htf
            psfs.convert(htf_files, xuv_path)
        else:
            print("No PS keys to flash")
            # Better delete any xuv file that might have been hanging around
            # so we don't accidentally flash it later
            if os.path.isfile(xuv_path):
                os.remove(xuv_path)

    elif filesystem_type == "customer_ro":
        # Point packfile at the customer-supplied filesystem root to produce an
        # XUV.
        # Temporary: this filesystem needs to contain the Apps config
        # as well as any customer RO filesystem
        try:
            fs_root = config["FS_ROOT"]
            no_setting = False if fs_root else True
        except KeyError:
            no_setting = True

        quick_charge_config_exists = bool('QUICKCHARGE_CONFIG' in config)
        sdb_system_name = config["system_label"]
        # Create a temporary directory to gather everything into
        image_directory = tempfile.mkdtemp()

        ws_projects = Workspace(workspace_file).parse()
        for name, project in ws_projects.items():
            proj_subsystem = project.default_configuration.properties.get('SUBSYSTEM_NAME')
            if "audio" in name or "audio" == proj_subsystem:
                project_files += get_capabilities_files_from_props(config, project)
                break

        bundle_files = [x for x in project_files if x.endswith("dkcs")]
        for bundle in bundle_files:
            # Get the associated ELF
            bundle_elf = os.path.splitext(bundle)[0] + ".elf"
            if os.path.isfile(bundle_elf):
                # Now report the ELF to the IDE for loading when debugging
                attribs = collections.OrderedDict()
                attribs['type'] = 'info'
                attribs['config'] = script_args.configuration
                # bundles are only for audio SS
                attribs["core"] = "audio/p0"
                attribs['module'] = 'deploy'
                bdex.log_buildproc_output('elfpath', attribs, bundle_elf)

        print("\nCopying files to %s filesystem...\n" % filesystem_type)
        # Firstly, copy any files that are added to the customer RO filesystem project
        if project_files:
            print("\nCopying files added to %s filesystem project...\n" % filesystem_type)
            for ro_file in project_files:
                print("Copying file %s" % ro_file)
                shutil.copy(ro_file, image_directory)
            sys.stdout.flush()

        # Then, if there is a FS_ROOT directory specified in the customer RO
        # filesystem project properties, copy all the files under this root
        # directory
        if not no_setting:
            if not os.path.isdir(fs_root):
                # Assume it's relative to the project root
                fs_root = os.path.normpath(os.path.join(working_dir, fs_root))
            if not os.path.isdir(fs_root):
                print("FS_ROOT directory does not exist.\nCreating: {}".format(fs_root))
                os.makedirs(fs_root)
            if not os.path.isdir(fs_root):
                return False

            # Generate the quick charge configuration file
            if quick_charge_config_exists:
                quick_charge_config_file = os.path.join(
                    working_dir, config["QUICKCHARGE_CONFIG"])
                quickChargeConfig.quickChargeHexGen.xml_to_hex(
                    quick_charge_config_file,
                    os.path.join(fs_root, "quick_charge_config"))

            print("\nCopying files under FS_ROOT...")
            print("FS_ROOT (%s) with working dir (%s):" % (fs_root, working_dir))
            for root, dirs, files in os.walk(fs_root):
                for file in files:
                    print("Copying file %s" % os.path.join(root, file))
            sys.stdout.flush()
            copydir(fs_root, image_directory)

        _run_prepare_fs(tool_root, image_directory, xuv_path, appsFs=appsFs)

    return True
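# A minimal standalone sketch of the PS store sizing rule implemented by
# get_ps_store_size() inside build_filesystem() above: the user PS store is
# half of the rw_config capacity, which must span an even number of flash
# blocks. The helper name and the example numbers are hypothetical and for
# illustration only; they are not taken from any real flash configuration.
def _example_ps_store_size(rw_config_capacity, block_size):
    if rw_config_capacity == 0:
        raise Exception('Flash config layout must contain a valid rw_config section')
    if ((rw_config_capacity / block_size) % 2) != 0:
        raise Exception('rw_config size must be an even number of blocks')
    return rw_config_capacity // 2

# e.g. _example_ps_store_size(8 * 4096, 4096) -> 16384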
def main(args):
    known_args, ubuild_args = parse_args(args)
    workspace_file = os.path.normpath(known_args.workspace_file)
    workspace = Workspace(workspace_file, UbuildBuilder(ubuild_args))
    return workspace.build()
def main(args):
    UISTATE_LOCATION_SELECTION = 0
    UISTATE_RESPONSE_SELECTION = 1
    UISTATE_PROCEED = 2
    UISTATE_EXIT = -1
    uistate = UISTATE_LOCATION_SELECTION

    parsed_args = parse_args(args)

    # Display whatever arguments have been given
    print("devkit root: %s" % (parsed_args.devkit_root))
    print("workspace: %s" % (parsed_args.workspace))
    if parsed_args.nowstring is not None:
        print("nowstring: %s" % (parsed_args.nowstring))
    if parsed_args.response is not None:
        print("response: %s" % (parsed_args.response))
    if parsed_args.folder_for_rsa_files is not None:
        print("folder_for_rsa_files: %s" % (parsed_args.folder_for_rsa_files))
    sys.stdout.flush()

    # Add required paths to sys.path if not there already
    path = os.path.join(parsed_args.devkit_root, "tools", "ubuild")
    if path not in sys.path:
        sys.path.append(path)

    # Set up the use of build.py main
    from maker.build import build_configs as build_configs
    build_runner = BuildRunner(parsed_args.devkit_root, build_configs)

    if parsed_args.nowstring is not None:
        # The user has provided their own folder name
        nowstring = parsed_args.nowstring
    else:
        nowstring = datetime.strftime(datetime.now(), '%Y%m%d%H%M%S')

    if parsed_args.folder_for_rsa_files is None:
        # The -f option has not been given so use the default location.
        # Present the UI to the user to confirm or change that location
        outpath = os.path.join(os.path.dirname(parsed_args.workspace), "dfu")
    else:
        # The -f option has been given so use that location
        outpath = os.path.abspath(parsed_args.folder_for_rsa_files)
        uistate = UISTATE_RESPONSE_SELECTION

    returnValue = 3
    if parsed_args.response is not None:
        # The user has provided a response on the command line
        # to avoid having to present a UI, for test automation
        response = str.lower(parsed_args.response)
        if response == "use":
            returnValue = 1
        elif response == "replace":
            returnValue = 2
        else:
            print("Invalid -r option {}".format(parsed_args.response))
            print('Valid options are "Use" or "Replace"')
            sys.stdout.flush()
            return False
        if uistate is UISTATE_RESPONSE_SELECTION:
            uistate = UISTATE_PROCEED

    sys.stdout.flush()
    tlh = None
    if uistate is UISTATE_LOCATION_SELECTION or \
            uistate is UISTATE_RESPONSE_SELECTION:
        tlh = TCL_LIBRARY_handler(parsed_args.devkit_root)
        top = Tkinter.Tk()

    private_pem_file = os.path.join(outpath, "private.pem")
    rsa_pss_constants_c_file = os.path.join(outpath, "rsa_pss_constants.c")

    while uistate != UISTATE_EXIT and uistate != UISTATE_PROCEED:
        if uistate is UISTATE_LOCATION_SELECTION:
            if not os.path.isdir(outpath):
                try:
                    os.makedirs(outpath)
                    print("Created folder %s" % outpath)
                    sys.stdout.flush()
                    returnValue = 3
                except (OSError, IOError) as exception:
                    print("Unable to create path {}; error {}. Exit!\n".format(
                        outpath, exception.errno))
                    sys.stdout.flush()
                    return False

            newpath = askdirectory(top, outpath)
            if newpath == "":
                # The directory selection has been cancelled
                # Cancel is exit
                print("Cancelled\n")
                sys.stdout.flush()
                return False

            if not os.path.isdir(newpath):
                try:
                    os.makedirs(newpath)
                    print("Created folder %s" % newpath)
                    sys.stdout.flush()
                    returnValue = 3
                except (OSError, IOError) as exception:
                    print("Unable to create path {}; error {}. Exit!\n".format(
                        newpath, exception.errno))
                    sys.stdout.flush()
                    return False

            outpath = newpath
            sys.stdout.flush()
            private_pem_file = os.path.join(outpath, "private.pem")
            rsa_pss_constants_c_file = os.path.join(outpath, "rsa_pss_constants.c")

            if parsed_args.response is None:
                uistate = UISTATE_RESPONSE_SELECTION
            else:
                uistate = UISTATE_PROCEED

        elif uistate is UISTATE_RESPONSE_SELECTION:
            if os.path.isfile(private_pem_file) and \
                    os.path.isfile(rsa_pss_constants_c_file):
                # There are existing files to use or replace.
                # The above is the minimum set.
                # Ask the user what to do via a dialog
                bb = show_button_box(top, outpath)
                returnValue = 0
                while returnValue == 0:
                    returnValue = bb.returnValue()
                if returnValue == -1:
                    # Cancelling
                    if parsed_args.folder_for_rsa_files is None:
                        # Back to location selection UI
                        uistate = UISTATE_LOCATION_SELECTION
                    else:
                        # Folder given so no location selection UI
                        # Cancel is exit
                        print("Cancelled\n")
                        sys.stdout.flush()
                        return False
                else:
                    uistate = UISTATE_PROCEED
            else:
                returnValue = 3
                uistate = UISTATE_PROCEED

    if tlh is not None:
        tlh.close()

    if not os.path.isdir(outpath):
        try:
            os.makedirs(outpath)
            print("Created folder %s" % outpath)
            returnValue = 3
        except (OSError, IOError) as exception:
            print("Unable to create path {}; error {}. Exit!\n".format(
                outpath, exception.errno))
            return False

    # Set up the file specs for the files of interest for archive/create
    filelist = []
    private_pem_file = os.path.join(outpath, "private.pem")
    filelist.append(private_pem_file)
    public_pem_file = os.path.join(outpath, "public.pem")
    filelist.append(public_pem_file)
    dfu_private_key_file = os.path.join(outpath, "dfu-private.key")
    filelist.append(dfu_private_key_file)
    dfu_public_key_file = os.path.join(outpath, "dfu-public.key")
    filelist.append(dfu_public_key_file)
    rsa_pss_constants_c_file = os.path.join(outpath, "rsa_pss_constants.c")
    filelist.append(rsa_pss_constants_c_file)

    # At this point the returnValue should be:
    # 1 to use (known to already exist),
    # 2 to replace (known to already exist), or
    # 3 to create
    if returnValue >= 2:
        # If we are to replace or create we need SecurityCmd.exe,
        # python.exe and gen_rsa_pss_constants.py
        scexe = os.path.join(parsed_args.devkit_root, "tools", "bin",
                             "SecurityCmd.exe")
        if not os.path.isfile(scexe):
            print("{} does not exist\n".format(scexe))
            print("Exiting!\n")
            sys.stdout.flush()
            return False

        python_exe = os.path.join(parsed_args.devkit_root, 'tools',
                                  'python27', 'python.exe')
        if not os.path.isfile(python_exe):
            print("{} does not exist\n".format(python_exe))
            print("Exiting!\n")
            sys.stdout.flush()
            return False

        gen_rsa_pss_constants_py = os.path.join(parsed_args.devkit_root,
                                                'tools',
                                                'gen_rsa_pss_constants.py')
        if not os.path.isfile(gen_rsa_pss_constants_py):
            print("{} does not exist\n".format(gen_rsa_pss_constants_py))
            print("Exiting!\n")
            sys.stdout.flush()
            return False

    if returnValue == 2:
        # Going to replace so archive what we already have
        zip_filename = nowstring + ".zip"
        zip_filespec = os.path.join(outpath, zip_filename)
        # Create the archive for writing "deflated" (compressed) files to
        try:
            with zipfile.ZipFile(zip_filespec, mode="w",
                                 compression=zipfile.ZIP_DEFLATED) as zip:
                for listfile in filelist:
                    if os.path.isfile(listfile):
                        print("Adding {} to archive {}".format(
                            listfile, zip_filespec))
                        zip.write(listfile, os.path.basename(listfile))
        except (OSError, IOError) as exception:
            print("Error with zip file {}; error {}, {}\n"
                  .format(zip_filespec, exception.errno,
                          os.strerror(exception.errno)))
            print("Exiting!\n")
            sys.stdout.flush()
            return False

        try:
            # Having archived all the files without an error, delete them
            for listfile in filelist:
                # Ensure the file is not read-only before trying to delete it
                os.chmod(listfile, stat.S_IWRITE)
                os.remove(listfile)
                print("Deleted {}".format(listfile))
        except (OSError, IOError) as exception:
            print("Error deleting file; error {}, {}\n"
                  .format(exception.errno, os.strerror(exception.errno)))
            restore_archive(nowstring, outpath, filelist)
            print("Exiting!\n")
            sys.stdout.flush()
            return False

        sys.stdout.flush()
        # Having archived and deleted the original files, now proceed as for create
        returnValue = 3

    if returnValue == 3:
        # Create the required files
        cmd_line = [scexe, "-product", "hyd", "creatersakey", "2048", "F4",
                    private_pem_file, public_pem_file]
        print("Invoking '%s'" % " ".join(cmd_line))
        sys.stdout.flush()
        result = subprocess.call(cmd_line)
        if result != 0:
            print("'%s' failed" % " ".join(cmd_line))
            restore_archive(nowstring, outpath, filelist)
            print("Exiting!\n")
            sys.stdout.flush()
            return False

        if not os.path.isfile(private_pem_file):
            print("Failed to create '%s'" % private_pem_file)
            restore_archive(nowstring, outpath, filelist)
            print("Exiting!\n")
            sys.stdout.flush()
            return False

        if not os.path.isfile(public_pem_file):
            print("Failed to create '%s'" % public_pem_file)
            restore_archive(nowstring, outpath, filelist)
            print("Exiting!\n")
            sys.stdout.flush()
            return False

        print("'%s' created" % private_pem_file)
        print("'%s' created" % public_pem_file)

        cmd_line = [scexe, "-product", "hyd", "pem2dfukey", "prv",
                    private_pem_file, dfu_private_key_file]
        print("Invoking '%s'" % " ".join(cmd_line))
        sys.stdout.flush()
        result = subprocess.call(cmd_line)
        # We don't care whether that failed or not as the dfu_private_key_file
        # is not used for any other processing
        if os.path.isfile(dfu_private_key_file):
            print("'%s' created" % dfu_private_key_file)

        cmd_line = [scexe, "-product", "hyd", "pem2dfukey", "pub",
                    public_pem_file, dfu_public_key_file]
        print("Invoking '%s'" % " ".join(cmd_line))
        sys.stdout.flush()
        result = subprocess.call(cmd_line)
        if result != 0:
            print("'%s' failed" % " ".join(cmd_line))
            restore_archive(nowstring, outpath, filelist)
            print("Exiting!\n")
            sys.stdout.flush()
            return False

        if not os.path.isfile(dfu_public_key_file):
            print("Failed to create '%s'" % dfu_public_key_file)
            restore_archive(nowstring, outpath, filelist)
            print("Exiting!\n")
            sys.stdout.flush()
            return False

        print("'%s' created" % dfu_public_key_file)

        cmd_line = [python_exe, gen_rsa_pss_constants_py, "-i",
                    dfu_public_key_file]
        print("Invoking '%s'" % " ".join(cmd_line))
        sys.stdout.flush()
        result = subprocess.call(cmd_line)
        if result != 0:
            print("'%s' failed" % " ".join(cmd_line))
            restore_archive(nowstring, outpath, filelist)
            print("Exiting!\n")
            sys.stdout.flush()
            return False

        if not os.path.isfile(rsa_pss_constants_c_file):
            print("Failed to create '%s'" % rsa_pss_constants_c_file)
            restore_archive(nowstring, outpath, filelist)
            print("Exiting!\n")
            sys.stdout.flush()
            return False

        print("'%s' created" % rsa_pss_constants_c_file)

    # All the required files are now in place.
    # Find the rsa_pss_constants.c in the workspace and replace it
    ws_projects = Workspace(parsed_args.workspace).parse()
    for project in ws_projects.keys():
        result = process_project(build_runner, parsed_args,
                                 ws_projects[project].filename,
                                 rsa_pss_constants_c_file, nowstring)
        if result < 0:
            # An error has occurred
            restore_archive(nowstring, outpath, filelist)
            sys.stdout.flush()
            return False
        elif result > 0:
            print("\nPlease rebuild library project %s and your application"
                  % ws_projects[project].filename)
            sys.stdout.flush()
            break

    if result == 0:
        print("\nUnable to find {} in any of the projects".format(
            rsa_pss_constants_c_file))
        sys.stdout.flush()
        return False

    sys.stdout.flush()
    return True
def main(args):
    DEFAULT_KEY_FOLDER_NAME = "peer_pair_key"
    KEY_C_FILE_NAME = "peer_pair_le_key.c"

    UISTATE_LOCATION_SELECTION = 0
    UISTATE_RESPONSE_SELECTION = 1
    UISTATE_PROCEED = 2
    UISTATE_EXIT = -1
    uistate = UISTATE_LOCATION_SELECTION

    parsed_args = parse_args(args)

    # Display whatever arguments have been given
    print("devkit root: %s" % (parsed_args.devkit_root))
    print("workspace: %s" % (parsed_args.workspace))
    if parsed_args.nowstring is not None:
        print("nowstring: %s" % (parsed_args.nowstring))
    if parsed_args.response is not None:
        print("response: %s" % (parsed_args.response))
    if parsed_args.folder_for_key_files is not None:
        print("folder_for_key_files: %s" % (parsed_args.folder_for_key_files))
    sys.stdout.flush()

    # Add required paths to sys.path if not there already
    path = os.path.join(parsed_args.devkit_root, "tools", "ubuild")
    if path not in sys.path:
        sys.path.append(path)

    # Set up the use of build.py main
    from maker.build import build_configs as build_configs
    build_runner = BuildRunner(parsed_args.devkit_root, build_configs)

    if parsed_args.nowstring is not None:
        # The user has provided their own folder name
        nowstring = parsed_args.nowstring
    else:
        nowstring = datetime.strftime(datetime.now(), '%Y%m%d%H%M%S')

    if parsed_args.folder_for_key_files is None:
        # The -f option has not been given so use the default location.
        # Present the UI to the user to confirm or change that location
        outpath = os.path.join(os.path.dirname(parsed_args.workspace),
                               DEFAULT_KEY_FOLDER_NAME)
    else:
        # The -f option has been given so use that location
        outpath = os.path.abspath(parsed_args.folder_for_key_files)
        uistate = UISTATE_RESPONSE_SELECTION

    returnValue = 3
    if parsed_args.response is not None:
        # The user has provided a response on the command line
        # to avoid having to present a UI, for test automation
        response = str.lower(parsed_args.response)
        if response == "use":
            returnValue = 1
        elif response == "replace":
            returnValue = 2
        else:
            print("Invalid -r option {}".format(parsed_args.response))
            print('Valid options are "Use" or "Replace"')
            sys.stdout.flush()
            return False
        if uistate is UISTATE_RESPONSE_SELECTION:
            uistate = UISTATE_PROCEED

    sys.stdout.flush()
    tlh = None
    if uistate is UISTATE_LOCATION_SELECTION or \
            uistate is UISTATE_RESPONSE_SELECTION:
        tlh = TCL_LIBRARY_handler(parsed_args.devkit_root)
        top = Tkinter.Tk()

    while uistate != UISTATE_EXIT and uistate != UISTATE_PROCEED:
        if uistate is UISTATE_LOCATION_SELECTION:
            if not os.path.isdir(outpath):
                try:
                    os.makedirs(outpath)
                    print("Created folder %s" % outpath)
                    sys.stdout.flush()
                    returnValue = 3
                except (OSError, IOError) as exception:
                    print("Unable to create path {}; error {}. Exit!\n".format(
                        outpath, exception.errno))
                    sys.stdout.flush()
                    return False

            newpath = askdirectory(top, outpath)
            if newpath == "":
                # The directory selection has been cancelled
                # Cancel is exit
                print("Cancelled\n")
                sys.stdout.flush()
                return False

            if not os.path.isdir(newpath):
                try:
                    os.makedirs(newpath)
                    print("Created folder %s" % newpath)
                    sys.stdout.flush()
                    returnValue = 3
                except (OSError, IOError) as exception:
                    print("Unable to create path {}; error {}. Exit!\n".format(
                        newpath, exception.errno))
                    sys.stdout.flush()
                    return False

            outpath = newpath
            sys.stdout.flush()

            if parsed_args.response is None:
                uistate = UISTATE_RESPONSE_SELECTION
            else:
                uistate = UISTATE_PROCEED

        elif uistate is UISTATE_RESPONSE_SELECTION:
            if os.path.isfile(os.path.join(outpath, KEY_C_FILE_NAME)):
                # There are existing files to use or replace.
                # The above is the minimum set.
                # Ask the user what to do via a dialog
                bb = show_button_box(top, outpath)
                returnValue = 0
                while returnValue == 0:
                    returnValue = bb.returnValue()
                if returnValue == -1:
                    # Cancelling
                    if parsed_args.folder_for_key_files is None:
                        # Back to location selection UI
                        uistate = UISTATE_LOCATION_SELECTION
                    else:
                        # Folder given so no location selection UI
                        # Cancel is exit
                        print("Cancelled\n")
                        sys.stdout.flush()
                        return False
                else:
                    uistate = UISTATE_PROCEED
            else:
                returnValue = 3
                uistate = UISTATE_PROCEED

    if tlh is not None:
        tlh.close()

    if not os.path.isdir(outpath):
        try:
            os.makedirs(outpath)
            print("Created folder %s" % outpath)
            returnValue = 3
        except (OSError, IOError) as exception:
            print("Unable to create path {}; error {}. Exit!\n".format(
                outpath, exception.errno))
            return False

    c_key_file = os.path.join(outpath, KEY_C_FILE_NAME)

    # Set up the file specs for the files of interest for archive/create
    filelist = []
    filelist.append(c_key_file)

    # At this point the returnValue should be:
    # 1 to use (known to already exist),
    # 2 to replace (known to already exist), or
    # 3 to create
    if returnValue == 2:
        # Going to replace so archive what we already have
        zip_filename = nowstring + ".zip"
        zip_filespec = os.path.join(outpath, zip_filename)
        # Create the archive for writing "deflated" (compressed) files to
        try:
            with zipfile.ZipFile(zip_filespec, mode="w",
                                 compression=zipfile.ZIP_DEFLATED) as zip:
                for listfile in filelist:
                    if os.path.isfile(listfile):
                        print("Adding {} to archive {}".format(
                            listfile, zip_filespec))
                        zip.write(listfile, os.path.basename(listfile))
        except (OSError, IOError) as exception:
            print("Error with zip file {}; error {}, {}\n"
                  .format(zip_filespec, exception.errno,
                          os.strerror(exception.errno)))
            print("Exiting!\n")
            sys.stdout.flush()
            return False

        try:
            # Having archived all the files without an error, delete them
            for listfile in filelist:
                # Ensure the file is not read-only before trying to delete it
                os.chmod(listfile, stat.S_IWRITE)
                os.remove(listfile)
                print("Deleted {}".format(listfile))
        except (OSError, IOError) as exception:
            print("Error deleting file; error {}, {}\n"
                  .format(exception.errno, os.strerror(exception.errno)))
            restore_archive(nowstring, outpath, filelist)
            print("Exiting!\n")
            sys.stdout.flush()
            return False

        sys.stdout.flush()
        # Having archived and deleted the original files, now proceed as for create
        returnValue = 3

    if returnValue >= 2:
        # Create the required c file
        create_key_c_source(c_key_file)
        print("'%s' created" % c_key_file)

    # Find the c file in the workspace and replace it
    ws_projects = Workspace(parsed_args.workspace).parse()
    for project in ws_projects.keys():
        result = process_project(build_runner, parsed_args,
                                 ws_projects[project].filename, c_key_file,
                                 nowstring)
        if result < 0:
            # An error has occurred
            restore_archive(nowstring, outpath, filelist)
            sys.stdout.flush()
            return False
        elif result > 0:
            print("\nPlease rebuild your application which includes project %s"
                  % ws_projects[project].filename)
            sys.stdout.flush()
            break

    if result == 0:
        print("\nUnable to find {} in any of the projects".format(c_key_file))
        sys.stdout.flush()
        return False

    sys.stdout.flush()
    return True