def setSconsProgressBarTotal(name, total):
    """Start the Scons progress bar and remember the file total.

    The module level count is kept so that later stages can know when
    the link step comes.
    """
    # Module level state, pylint: disable=global-statement
    global _total, _stage

    _stage = name
    _total = total

    setupProgressBar(stage="%s C" % name, unit="file", total=total)
def _copyDllsUsed(dist_dir, used_dlls):
    """Copy all detected DLLs into the dist folder.

    Returns a list of (source filename, package name, DLL base name)
    tuples describing what was placed where.
    """
    setupProgressBar(
        stage="Copying used DLLs",
        unit="DLL",
        total=len(used_dlls),
    )

    result = []

    for dll_filename, (package_name, sources) in iterItems(used_dlls):
        dll_basename = os.path.basename(dll_filename)
        dest_path = os.path.join(dist_dir, dll_basename)

        reportProgressBar(dest_path)

        # Sometimes DLL dependencies were copied there already. TODO: That should
        # actually become disallowed with plugins no longer seeing that folder.
        if not os.path.exists(dest_path):
            copyDllFile(source_path=dll_filename, dest_path=dest_path)

        result.append((dll_filename, package_name, dll_basename))

        if Options.isShowInclusion():
            inclusion_logger.info(
                "Included used shared library '%s' (used by %s)."
                % (dll_filename, ", ".join(sources))
            )

    closeProgressBar()

    return result
def _restartProgress():
    """Close the current progress bar and begin a fresh optimization pass."""
    closeProgressBar()

    # Singleton, pylint: disable=global-statement
    global pass_count
    pass_count += 1

    optimization_logger.info_fileoutput(
        "PASS %d:" % pass_count, other_logger=progress_logger
    )

    # Count everything still to do plus what is already done, so the bar
    # reflects the whole pass.
    module_total = (
        ModuleRegistry.getRemainingModulesCount()
        + ModuleRegistry.getDoneModulesCount()
    )

    setupProgressBar(
        stage="PASS %d" % pass_count,
        unit="module",
        total=module_total,
    )
def _restartProgress():
    """Begin a fresh optimization pass, restarting the progress bar."""
    global pass_count  # Singleton, pylint: disable=global-statement

    closeProgressBar()

    pass_count += 1

    optimization_logger.info_fileoutput(
        "PASS %d:" % pass_count, other_logger=progress_logger
    )

    # Only show a progress bar for non-verbose runs, or when the log output
    # goes to a file anyway and cannot clash with the bar.
    if not Options.is_verbose or optimization_logger.isFileOutput():
        module_total = (
            ModuleRegistry.getRemainingModulesCount()
            + ModuleRegistry.getDoneModulesCount()
        )

        setupProgressBar(
            stage="PASS %d" % pass_count,
            unit="module",
            total=module_total,
            min_total=last_total,
        )
def _resolveShadowedModules():
    """Decide between compiled modules and their uncompiled duplicates.

    We might have chosen to include a module as bytecode, and only compiled
    it to find its imports, in which case we can drop the compiled one. Or a
    compiled module may shadow a frozen module, but be a different one, then
    the frozen one can be dropped.

    TODO: This really should be done when the compiled module comes into
    existence.
    """
    for module in ModuleRegistry.getDoneModules():
        if module.isCompiledPythonModule():
            uncompiled_module = ModuleRegistry.getUncompiledModule(
                module_name=module.getFullName(),
                module_filename=module.getCompileTimeFilename(),
            )

            if uncompiled_module is not None:
                # We now need to decide which one to keep, compiled or uncompiled
                # module. Some uncompiled modules may have been asked by the user
                # or technically required. By default, frozen code if it exists
                # is preferred, as it will be from standalone mode adding it.
                if (
                    uncompiled_module.isUserProvided()
                    or uncompiled_module.isTechnical()
                ):
                    ModuleRegistry.removeDoneModule(module)
                else:
                    ModuleRegistry.removeUncompiledModule(uncompiled_module)


def _warnUnusedFollowedModules():
    """Warn about explicitly followed module names that never appeared."""
    for any_case_module in Options.getShallFollowModules():
        # Pattern specs cannot be matched by exact name comparison.
        if "*" in any_case_module or "{" in any_case_module:
            continue

        for module in ModuleRegistry.getDoneModules():
            if module.getFullName() == any_case_module:
                break
        else:
            general.warning(
                "Did not follow import to unused '%s', consider include options."
                % any_case_module
            )


def _collectCompiledModules():
    """Report all inclusions and return the modules that need C code."""
    compiled_modules = []

    for module in ModuleRegistry.getDoneModules():
        if module.isCompiledPythonModule():
            compiled_modules.append(module)

            if Options.isShowInclusion():
                inclusion_logger.info(
                    "Included compiled module '%s'." % module.getFullName()
                )
        elif module.isPythonExtensionModule():
            addExtensionModuleEntryPoint(module)

            if Options.isShowInclusion():
                inclusion_logger.info(
                    "Included extension module '%s'." % module.getFullName()
                )
        elif module.isUncompiledPythonModule():
            if Options.isShowInclusion():
                inclusion_logger.info(
                    "Included uncompiled module '%s'." % module.getFullName()
                )
        else:
            # Every done module must be one of the three kinds above.
            assert False, module

    return compiled_modules


def makeSourceDirectory():
    """Get the full list of modules imported, create code for all of them."""
    # Drop compiled modules that lose against an uncompiled duplicate.
    _resolveShadowedModules()

    # Lets check if the asked modules are actually present, and warn the
    # user if one of those was not found.
    _warnUnusedFollowedModules()

    # Prepare code generation, i.e. execute finalization for it.
    for module in ModuleRegistry.getDoneModules():
        if module.isCompiledPythonModule():
            Finalization.prepareCodeGeneration(module)

    # Do some reporting and determine compiled modules to work on.
    compiled_modules = _collectCompiledModules()

    # Pick filenames.
    source_dir = OutputDirectories.getSourceDirectoryPath()

    module_filenames = pickSourceFilenames(
        source_dir=source_dir, modules=compiled_modules
    )

    setupProgressBar(
        stage="C Source Generation",
        unit="module",
        total=len(compiled_modules),
    )

    # Generate code for compiled modules, this can be slow, so do it separately
    # with a progress bar.
    for module in compiled_modules:
        c_filename = module_filenames[module]

        reportProgressBar(
            item=module.getFullName(),
        )

        source_code = CodeGeneration.generateModuleCode(
            module=module,
            data_filename=os.path.basename(c_filename + "onst"),  # Really .const
        )

        writeSourceCode(filename=c_filename, source_code=source_code)

    closeProgressBar()

    (
        helper_decl_code,
        helper_impl_code,
        constants_header_code,
        constants_body_code,
    ) = CodeGeneration.generateHelpersCode()

    writeSourceCode(
        filename=os.path.join(source_dir, "__helpers.h"), source_code=helper_decl_code
    )
    writeSourceCode(
        filename=os.path.join(source_dir, "__helpers.c"), source_code=helper_impl_code
    )
    writeSourceCode(
        filename=os.path.join(source_dir, "__constants.h"),
        source_code=constants_header_code,
    )
    writeSourceCode(
        filename=os.path.join(source_dir, "__constants.c"),
        source_code=constants_body_code,
    )
def attachOnefilePayload(
    dist_dir, onefile_output_filename, start_binary, expect_compression
):
    """Append the payload of all dist files to the onefile binary.

    The payload format is the magic b"KA", a compression indicator byte,
    then for each file the NUL terminated (encoded) relative filename, an
    8 byte little file size and the raw file data, terminated by an empty
    filename, finally the total payload span size for the C code to find.
    """
    # Somewhat detail rich, pylint: disable=too-many-locals
    compression_indicator, compressor = getCompressorFunction(
        expect_compression=expect_compression
    )

    with _openBinaryFileForAppending(onefile_output_filename) as output_file:
        # Seeking to end of file seems necessary on Python2 at least, maybe it's
        # just that tell reports wrong value initially.
        output_file.seek(0, 2)
        start_pos = output_file.tell()

        output_file.write(b"KA" + compression_indicator)

        # Move the binary to start immediately to the start position
        file_list = getFileList(dist_dir, normalize=False)
        file_list.remove(start_binary)
        file_list.insert(0, start_binary)

        if isWin32Windows():
            filename_encoding = "utf-16le"
        else:
            filename_encoding = "utf8"

        payload_size = 0

        setupProgressBar(
            stage="Onefile Payload",
            unit="module",
            total=len(file_list),
        )

        with compressor(output_file) as compressed_file:
            for filename_full in file_list:
                filename_relative = os.path.relpath(filename_full, dist_dir)

                reportProgressBar(
                    item=filename_relative,
                    update=False,
                )

                filename_encoded = (filename_relative + "\0").encode(
                    filename_encoding
                )

                compressed_file.write(filename_encoded)
                payload_size += len(filename_encoded)

                with open(filename_full, "rb") as input_file:
                    # Determine the size by seeking, avoids a stat call.
                    input_file.seek(0, 2)
                    input_size = input_file.tell()
                    input_file.seek(0, 0)

                    compressed_file.write(struct.pack("Q", input_size))
                    shutil.copyfileobj(input_file, compressed_file)

                    payload_size += input_size + 8

                reportProgressBar(
                    item=filename_relative,
                    update=True,
                )

            # Using empty filename as a terminator.
            filename_encoded = "\0".encode(filename_encoding)
            compressed_file.write(filename_encoded)
            payload_size += len(filename_encoded)

            compressed_size = compressed_file.tell()

        if compression_indicator == b"Y":
            onefile_logger.info(
                "Onefile payload compression ratio (%.2f%%) size %d to %d."
                % (
                    (float(compressed_size) / payload_size) * 100,
                    payload_size,
                    compressed_size,
                )
            )

        if isWin32Windows():
            # add padding to have the start position at a double world boundary
            # this is needed on windows so that a possible certificate immediately
            # follows the start position
            pad = output_file.tell() % 8

            if pad != 0:
                # Bug fix: "bytes(8 - pad)" yields zero bytes only on Python3;
                # on Python2 (which this code still caters to, see the seek
                # comment above) it returns the decimal string of the number,
                # writing the wrong amount of padding. This form is identical
                # on Python3 and correct on both.
                output_file.write(b"\0" * (8 - pad))

        output_file.seek(0, 2)
        end_pos = output_file.tell()

        # Size of the payload data plus the size of that size storage, so C code can
        # jump directly to it.
        output_file.write(struct.pack("Q", end_pos - start_pos))

    closeProgressBar()
def setSconsProgressBarTotal(total):
    """Open the progress bar for the backend C compilation of all files."""
    setupProgressBar(stage="Backend C", unit="file", total=total)
def _detectUsedDLLs(source_dir, standalone_entry_points, use_cache, update_cache):
    """Detect the DLLs used by all standalone entry points, in parallel.

    Returns an OrderedDict mapping each absolute DLL filename to a tuple of
    (package name, list of binaries that use it).
    """
    setupProgressBar(
        stage="Detecting used DLLs",
        unit="DLL",
        total=len(standalone_entry_points),
    )

    def addDLLInfo(count, source_dir, original_filename, binary_filename, package_name):
        # Worker: detect DLLs of one binary; count 0 is the main executable.
        used_dlls = _detectBinaryDLLs(
            is_main_executable=count == 0,
            source_dir=source_dir,
            original_filename=original_filename,
            binary_filename=binary_filename,
            package_name=package_name,
            use_cache=use_cache,
            update_cache=update_cache,
        )

        # Allow plugins to prevent inclusion, this may discard things from used_dlls.
        Plugins.removeDllDependencies(
            dll_filename=binary_filename, dll_filenames=used_dlls
        )

        for dll_filename in sorted(tuple(used_dlls)):
            if not os.path.isfile(dll_filename):
                # Bug fix: warn exactly once per missing DLL. The previous
                # condition tested the set for truthiness, so the first missing
                # DLL never warned while later ones warned on every occurrence.
                if dll_filename not in _not_found_dlls:
                    general.warning(
                        """\
Dependency '%s' could not be found, expect runtime issues. If this is \
working with Python, report a Nuitka bug."""
                        % dll_filename
                    )

                    _not_found_dlls.add(dll_filename)

                used_dlls.remove(dll_filename)

        reportProgressBar(binary_filename)

        return binary_filename, package_name, used_dlls

    result = OrderedDict()

    with ThreadPoolExecutor(max_workers=Utils.getCoreCount() * 3) as worker_pool:
        workers = []

        for count, standalone_entry_point in enumerate(standalone_entry_points):
            workers.append(
                worker_pool.submit(
                    addDLLInfo,
                    count,
                    source_dir,
                    standalone_entry_point.source_path,
                    standalone_entry_point.dest_path,
                    standalone_entry_point.package_name,
                )
            )

        for binary_filename, package_name, used_dlls in waitWorkers(workers):
            for dll_filename in used_dlls:
                # We want these to be absolute paths. Solve that in the parts
                # where _detectBinaryDLLs is platform specific.
                assert os.path.isabs(dll_filename), dll_filename

                if dll_filename not in result:
                    result[dll_filename] = (package_name, [])
                result[dll_filename][1].append(binary_filename)

    closeProgressBar()

    return result