def detectUsedDLLs(source_dir, standalone_entry_points, use_cache, update_cache):
    """Detect the DLLs used by the standalone entry points, in parallel.

    Args:
        source_dir: build source directory handed through to DLL detection
        standalone_entry_points: entry points (first one is the main executable)
        use_cache: whether to read cached DLL dependency results
        update_cache: whether to write cached DLL dependency results

    Returns:
        OrderedDict mapping each absolute DLL filename to the list of
        binary filenames that depend on it.
    """

    def addDLLInfo(count, source_dir, original_filename, binary_filename, package_name):
        # The first entry point (count == 0) is the main executable.
        used_dlls = detectBinaryDLLs(
            is_main_executable=count == 0,
            source_dir=source_dir,
            original_filename=original_filename,
            binary_filename=binary_filename,
            package_name=package_name,
            use_cache=use_cache,
            update_cache=update_cache,
        )

        # Allow plugins to prevent inclusion, this may discard things from used_dlls.
        Plugins.removeDllDependencies(
            dll_filename=binary_filename, dll_filenames=used_dlls
        )

        # Iterate a snapshot since we mutate used_dlls inside the loop.
        for dll_filename in sorted(tuple(used_dlls)):
            if not os.path.isfile(dll_filename):
                # Warn once per distinct missing DLL. The previous truthiness
                # test on the set skipped the first missing DLL and repeated
                # the warning for all later ones.
                if dll_filename not in _unfound_dlls:
                    general.warning(
                        "Dependency '%s' could not be found, you might need to copy it manually."
                        % dll_filename
                    )

                    _unfound_dlls.add(dll_filename)

                used_dlls.remove(dll_filename)

        return binary_filename, used_dlls

    result = OrderedDict()

    with ThreadPoolExecutor(max_workers=Utils.getCoreCount() * 3) as worker_pool:
        workers = []

        for count, standalone_entry_point in enumerate(standalone_entry_points):
            workers.append(
                worker_pool.submit(
                    addDLLInfo,
                    count,
                    source_dir,
                    standalone_entry_point.source_path,
                    standalone_entry_point.dest_path,
                    standalone_entry_point.package_name,
                )
            )

        for binary_filename, used_dlls in waitWorkers(workers):
            for dll_filename in used_dlls:
                # We want these to be absolute paths. Solve that in the parts
                # where detectBinaryDLLs is platform specific.
                assert os.path.isabs(dll_filename), dll_filename

                if dll_filename not in result:
                    result[dll_filename] = []
                result[dll_filename].append(binary_filename)

    return result
def detectUsedDLLs(source_dir, standalone_entry_points):
    """Map every DLL used by the standalone entry points to its users.

    Detection is fanned out over a thread pool, one task per entry point.

    Returns:
        OrderedDict from absolute DLL path to the list of binary filenames
        depending on that DLL.
    """

    def addDLLInfo(count, source_dir, original_filename, binary_filename, package_name):
        # Entry point number zero is the main executable.
        dependencies = detectBinaryDLLs(
            is_main_executable=count == 0,
            source_dir=source_dir,
            original_filename=original_filename,
            binary_filename=binary_filename,
            package_name=package_name,
        )

        return binary_filename, dependencies

    result = OrderedDict()

    with ThreadPoolExecutor(max_workers=Utils.getCoreCount() * 3) as worker_pool:
        pending = [
            worker_pool.submit(
                addDLLInfo,
                number,
                source_dir,
                original_filename,
                binary_filename,
                package_name,
            )
            for number, (original_filename, binary_filename, package_name) in enumerate(
                standalone_entry_points
            )
        ]

        for binary_filename, dependencies in waitWorkers(pending):
            for dll_filename in dependencies:
                # We want these to be absolute paths. Solve that in the parts
                # where detectBinaryDLLs is platform specific.
                assert os.path.isabs(dll_filename), dll_filename

                result.setdefault(dll_filename, []).append(binary_filename)

    return result
default=None, help="""\ Enforce the use of specific MSVC version on Windows. Allowed values are e.g. 14.0, specify an illegal value for a list of installed compilers, beware that only latest MSVC is really supported. Defaults to the most recent version.""", ) c_compiler_group.add_option( "-j", "--jobs", action="store", dest="jobs", metavar="N", default=Utils.getCoreCount(), help="""\ Specify the allowed number of parallel C compiler jobs. Defaults to the system CPU count.""", ) c_compiler_group.add_option( "--lto", action="store_true", dest="lto", default=False, help="""\ Use link time optimizations if available and usable (MSVC or gcc 4.6 and higher). Defaults to off.""", )
"--msvc", action = "store", dest = "msvc", default = None, help = """\ Enforce the use of specific MSVC version on Windows. Allowed values are e.g. 9.0, 9.0exp, specify an illegal value for a list of installed compilers. Defaults to the most recent version.""" ) cpp_compiler_group.add_option( "-j", "--jobs", action = "store", dest = "jobs", metavar = 'N', default = Utils.getCoreCount(), help = """\ Specify the allowed number of parallel C++ compiler jobs. Defaults to the system CPU count.""", ) cpp_compiler_group.add_option( "--lto", action = "store_true", dest = "lto", default = False, help = """\ Use link time optimizations if available and usable (g++ 4.6 and higher). Defaults to off.""" )
def _detectUsedDLLs(source_dir, standalone_entry_points, use_cache, update_cache):
    """Detect the DLLs used by the standalone entry points, in parallel.

    Progress is reported per binary via the progress bar helpers.

    Args:
        source_dir: build source directory handed through to DLL detection
        standalone_entry_points: entry points (first one is the main executable)
        use_cache: whether to read cached DLL dependency results
        update_cache: whether to write cached DLL dependency results

    Returns:
        OrderedDict mapping each absolute DLL filename to a pair of
        (package_name, list of binary filenames using that DLL).
    """
    setupProgressBar(
        stage="Detecting used DLLs",
        unit="DLL",
        total=len(standalone_entry_points),
    )

    def addDLLInfo(count, source_dir, original_filename, binary_filename, package_name):
        # The first entry point (count == 0) is the main executable.
        used_dlls = _detectBinaryDLLs(
            is_main_executable=count == 0,
            source_dir=source_dir,
            original_filename=original_filename,
            binary_filename=binary_filename,
            package_name=package_name,
            use_cache=use_cache,
            update_cache=update_cache,
        )

        # Allow plugins to prevent inclusion, this may discard things from used_dlls.
        Plugins.removeDllDependencies(
            dll_filename=binary_filename, dll_filenames=used_dlls
        )

        # Iterate a snapshot since we mutate used_dlls inside the loop.
        for dll_filename in sorted(tuple(used_dlls)):
            if not os.path.isfile(dll_filename):
                # Warn once per distinct missing DLL. The previous truthiness
                # test on the set skipped the first missing DLL and repeated
                # the warning for all later ones.
                if dll_filename not in _not_found_dlls:
                    general.warning(
                        """\
Dependency '%s' could not be found, expect runtime issues. If this is \
working with Python, report a Nuitka bug."""
                        % dll_filename
                    )

                    _not_found_dlls.add(dll_filename)

                used_dlls.remove(dll_filename)

        reportProgressBar(binary_filename)

        return binary_filename, package_name, used_dlls

    result = OrderedDict()

    with ThreadPoolExecutor(max_workers=Utils.getCoreCount() * 3) as worker_pool:
        workers = []

        for count, standalone_entry_point in enumerate(standalone_entry_points):
            workers.append(
                worker_pool.submit(
                    addDLLInfo,
                    count,
                    source_dir,
                    standalone_entry_point.source_path,
                    standalone_entry_point.dest_path,
                    standalone_entry_point.package_name,
                )
            )

        for binary_filename, package_name, used_dlls in waitWorkers(workers):
            for dll_filename in used_dlls:
                # We want these to be absolute paths. Solve that in the parts
                # where _detectBinaryDLLs is platform specific.
                assert os.path.isabs(dll_filename), dll_filename

                if dll_filename not in result:
                    result[dll_filename] = (package_name, [])
                result[dll_filename][1].append(binary_filename)

    closeProgressBar()

    return result