def buildSconsCommand(quiet, options):
    """Assemble the argument vector for invoking scons.

    Starts from the scons binary call and adds the standard arguments,
    then the "key=value" pairs from the given options dictionary.
    """
    result = getSconsBinaryCall()

    if quiet:
        result.append("--quiet")

    result.extend(
        (
            # The scons file
            "-f",
            Utils.joinpath(getSconsDataPath(), "SingleExe.scons"),
            # Parallel compilation.
            "--jobs",
            str(Options.getJobLimit()),
            # Do not warn about deprecation from Scons
            "--warn=no-deprecated",
            # Don't load "site_scons" at all.
            "--no-site-dir",
        )
    )

    if Options.isShowScons():
        result.append("--debug=explain")

    # Option values to provide to scons. Find these in the caller.
    for key, value in options.items():
        result.append(key + '=' + value)

    return result
def runScons(options, quiet):
    """Run scons through the shell, returning True on success."""
    # For the scons file to find the static C++ files and include path. The
    # scons file is unable to use __file__ for the task.
    os.environ["NUITKA_SCONS"] = getSconsDataPath()

    if os.name == "nt":
        # On Windows this Scons variable must be set by us.
        os.environ["SCONS_LIB_DIR"] = Utils.joinpath(
            getSconsInlinePath(),
            "lib",
            "scons-2.0.1"
        )

        # Also, for MinGW we can avoid the user having to add the path if he
        # used the default path or installed it on the same drive by
        # appending to the PATH variable before executing scons.
        os.environ["PATH"] += r";\MinGW\bin;C:\MinGW\bin"

    command_values = {
        "python": sys.executable if Utils.python_version < 300 else "python",
        "binary": getSconsBinaryPath(),
        "quiet": "--quiet" if quiet else "",
        "scons_file": Utils.joinpath(getSconsDataPath(), "SingleExe.scons"),
        "job_limit": Options.getJobLimit(),
        "options": " ".join(
            "%s=%s" % (key, value)
            for key, value in options.items()
        )
    }

    scons_command = """%(python)s %(binary)s %(quiet)s -f %(scons_file)s --jobs %(job_limit)d %(options)s""" % command_values

    if Options.isShowScons():
        Tracing.printLine("Scons command:", scons_command)

    # Zero exit status from the shell means success.
    return 0 == os.system(scons_command)
def importUserPlugins():
    """ Extract the filenames of user plugins and store them in list of
        active plugins.

        Notes:
            A plugin is accepted only if it has a non-empty variable
            plugin_name, which does not equal that of a disabled (standard)
            plugin. Supports plugin option specifications.
        Returns:
            None
    """
    for plugin_spec in Options.getUserPlugins():
        # Options may be attached after "=", only the filename part matters.
        plugin_filename = plugin_spec.split("=", 1)[0]

        if not os.path.exists(plugin_filename):
            sys.exit("Error, cannot find '%s'." % plugin_filename)

        user_plugin_module = importFile(plugin_filename)

        for attribute_name in dir(user_plugin_module):
            candidate = getattr(user_plugin_module, attribute_name)

            if not isObjectAUserPluginBaseClass(candidate):
                continue

            plugin_name = getattr(candidate, "plugin_name", None)

            if plugin_name and plugin_name not in Options.getPluginsDisabled():
                active_plugin_list.append(candidate())
                info("User plugin '%s' loaded." % plugin_filename)
def buildStatementContinueLoop(provider, node, source_ref):
    """Build a 'continue' statement node, rejecting use inside 'finally'."""
    if getBuildContext() == "finally":
        # The reported error column and shown source line differ between
        # CPython versions in full compatibility mode.
        full_compat = Options.isFullCompat()

        if full_compat and Utils.python_version < 300:
            col_offset = None
        else:
            col_offset = node.col_offset - 9

        source_line = "" if full_compat and Utils.python_version >= 300 else None

        SyntaxErrors.raiseSyntaxError(
            "'continue' not supported inside 'finally' clause",
            source_ref,
            col_offset=col_offset,
            source_line=source_line
        )

    return makeTryFinallyIndicator(
        provider=provider,
        statement=StatementContinueLoop(source_ref=source_ref),
        is_loop_exit=True
    )
def handleNonlocalDeclarationNode(provider, node, source_ref):
    """Register a "nonlocal" declaration on the provider.

    The source reference of the nonlocal really doesn't matter. We need to
    catch the error of declaring a parameter variable as nonlocal ourselves
    here, the AST parsing doesn't catch it.
    """
    # pylint: disable=W0613
    parameter_names = provider.getParameters().getParameterNames()

    for variable_name in node.names:
        if variable_name in parameter_names:
            # Full compatibility before 3.4 hides file and line display.
            old_style = Options.isFullCompat() and Utils.python_version < 340

            SyntaxErrors.raiseSyntaxError(
                reason="name '%s' is parameter and nonlocal" % variable_name,
                source_ref=None if old_style else source_ref,
                display_file=not old_style,
                display_line=not old_style
            )

    provider.addNonlocalsDeclaration(node.names, source_ref)

    return None
def handleNonlocalDeclarationNode(provider, node, source_ref):
    """Register a "nonlocal" declaration, rejecting parameter conflicts.

    The AST parsing doesn't catch declaring a parameter variable as
    nonlocal, so we need to do it here ourselves.
    """
    # Generator and coroutine bodies do not carry the parameters themselves,
    # look them up on the real providing function.
    parameter_provider = provider
    while parameter_provider.isExpressionGeneratorObjectBody() or \
          parameter_provider.isExpressionCoroutineObjectBody():
        parameter_provider = parameter_provider.getParentVariableProvider()

    # Class bodies have no parameters at all.
    parameter_names = (
        ()
        if parameter_provider.isExpressionClassBody()
        else parameter_provider.getParameters().getParameterNames()
    )

    for variable_name in node.names:
        if variable_name in parameter_names:
            old_style = Options.isFullCompat() and python_version < 340

            SyntaxErrors.raiseSyntaxError(
                reason="name '%s' is parameter and nonlocal" % variable_name,
                source_ref=None if old_style else source_ref,
                display_file=not old_style,
                display_line=not old_style
            )

    provider.addNonlocalsDeclaration(node.names, source_ref)

    return None
def createModuleTree(module, source_ref, source_filename, is_main):
    """Read a module's source file, parse it and attach the body tree."""
    if Options.isShowProgress():
        memory_watch = Utils.MemoryWatch()

    source_code = readSourceCodeFromFilename(source_filename)

    module.setBody(
        buildParseTree(
            provider=module,
            source_code=source_code,
            source_ref=source_ref,
            is_module=True,
            is_main=is_main
        )
    )

    completeVariableClosures(module)

    if Options.isShowProgress():
        memory_watch.finish()

        Tracing.printLine(
            "Memory usage changed loading module '%s': %s" % (
                module.getFullName(),
                memory_watch.asStr()
            )
        )
def createModuleTree(module, source_ref, source_code, is_main):
    """Parse already-read source code and attach the body tree to module.

    Raises:
        CodeTooComplexCode: when the parser's recursion limit is exceeded.
    """
    if Options.isShowProgress():
        memory_watch = Utils.MemoryWatch()

    try:
        module_body = buildParseTree(
            provider=module,
            source_code=source_code,
            source_ref=source_ref,
            is_module=True,
            is_main=is_main
        )
    except RuntimeError as exc:
        # Too deeply nested source code overflows parser recursion.
        if "maximum recursion depth" in exc.args[0]:
            raise CodeTooComplexCode(
                module.getFullName(),
                module.getCompileTimeFilename()
            )

        raise

    # A bare frame needs wrapping into a statements sequence for the body.
    if module_body.isStatementsFrame():
        module_body = makeStatementsSequenceFromStatement(statement=module_body)

    module.setBody(module_body)

    completeVariableClosures(module)

    if Options.isShowProgress():
        memory_watch.finish()

        Tracing.printLine(
            "Memory usage changed loading module '%s': %s" % (
                module.getFullName(),
                memory_watch.asStr()
            )
        )
def getModuleCode(module_context, template_values):
    """Produce the complete C source of a module from its template values."""
    # Copyright header carrying module identification.
    header = template_global_copyright % {
        "name": module_context.getName(),
        "version": Options.getVersion(),
        "year": Options.getYear()
    }

    decl_codes, init_codes, check_codes = getConstantInitCodes(module_context)

    if module_context.needsModuleFilenameObject():
        decl_codes.append("static PyObject *module_filename_obj;")

    template_values["constant_decl_codes"] = indented(decl_codes, 0)
    template_values["constant_init_codes"] = indented(init_codes, 1)
    template_values["constant_check_codes"] = indented(check_codes, 1)

    return header + template_module_body_template % template_values
def createModuleTree(module, source_ref, source_code, is_main):
    """Build a module's body tree from given source code and attach it."""
    if Options.isShowProgress():
        memory_watch = Utils.MemoryWatch()

    module_body = buildParseTree(
        provider=module,
        source_code=source_code,
        source_ref=source_ref,
        is_module=True,
        is_main=is_main
    )

    # A bare frame needs wrapping into a statements sequence.
    if module_body.isStatementsFrame():
        module_body = makeStatementsSequenceFromStatement(
            statement=module_body,
        )

    module.setBody(module_body)
    completeVariableClosures(module)

    if Options.isShowProgress():
        memory_watch.finish()

        Tracing.printLine(
            "Memory usage changed loading module '%s': %s" % (
                module.getFullName(),
                memory_watch.asStr()
            )
        )
def buildStatementContinueLoop(node, source_ref):
    """Build a 'continue' statement, rejecting use inside 'finally'."""
    if getBuildContext() == "finally":
        # Error column and source line display differ by version in full
        # compatibility mode.
        full_compat = Options.isFullCompat()

        if full_compat and Utils.python_version < 300:
            col_offset = None
        else:
            col_offset = node.col_offset - 9

        source_line = "" if full_compat and Utils.python_version >= 300 else None

        SyntaxErrors.raiseSyntaxError(
            "'continue' not supported inside 'finally' clause",
            source_ref,
            col_offset=col_offset,
            source_line=source_line
        )

    result = makeTryFinallyIndicatorStatements(
        is_loop_exit=True,
        source_ref=source_ref
    )
    result.append(StatementContinueLoop(source_ref=source_ref))

    return makeStatementsSequenceOrStatement(
        statements=result,
        source_ref=source_ref
    )
def getMainCode(main_module, codes, context):
    """Return the combined C codes with the "main" program part appended.

    The main program template is filled with the interpreter flags captured
    at compile time, so the created binary mimics the compiling Python's
    behavior.
    """
    python_flags = Options.getPythonFlags()

    if context.isEmptyModule():
        code_identifier = "NULL"
    else:
        # Code object of the "__main__" module itself.
        code_identifier = context.getCodeObjectHandle(
            filename      = main_module.getRunTimeFilename(),
            var_names     = (),
            arg_count     = 0,
            kw_only_count = 0,
            line_number   = 0,
            code_name     = "<module>",
            is_generator  = False,
            is_optimized  = False,
            has_starlist  = False,
            has_stardict  = False,
            has_closure   = False,
            future_flags  = main_module.getSourceReference().getFutureSpec().\
                              asFlags()
        )

    # Flags not present in all Python versions default to 0 via hasattr.
    main_code = CodeTemplates.main_program % {
        "sys_executable" : getModuleConstantCode(
            constant = sys.executable,
        ),
        "python_sysflag_debug" : sys.flags.debug,
        "python_sysflag_py3k_warning" : (
            sys.flags.py3k_warning if hasattr(sys.flags, "py3k_warning") else 0
        ),
        "python_sysflag_division_warning" : (
            sys.flags.division_warning
              if hasattr(sys.flags, "division_warning")
            else 0
        ),
        #"python_sysflag_division_new" : sys.flags.division_new, #not supported
        "python_sysflag_inspect" : sys.flags.inspect,
        "python_sysflag_interactive" : sys.flags.interactive,
        "python_sysflag_optimize" : sys.flags.optimize,
        "python_sysflag_dont_write_bytecode" : sys.flags.dont_write_bytecode,
        # NOTE(review): the environment value is a string while the default
        # here is an int — presumably only used for textual substitution;
        # verify against the template.
        "python_sysflag_no_site" : os.environ.get(
            "NUITKA_SITE_FLAG",
            1 if "no_site" in Options.getPythonFlags() else 0
        ),
        "python_sysflag_no_user_site" : sys.flags.no_user_site,
        "python_sysflag_ignore_environment" : sys.flags.ignore_environment,
        "python_sysflag_tabcheck" : (
            sys.flags.tabcheck if hasattr(sys.flags, "tabcheck") else 0
        ),
        "python_sysflag_verbose" : 1 if "trace_imports" in python_flags else 0,
        "python_sysflag_unicode" : (
            sys.flags.unicode if hasattr(sys.flags, "unicode") else 0
        ),
        "python_sysflag_bytes_warning" : sys.flags.bytes_warning,
        "python_sysflag_hash_randomization" : (
            sys.flags.hash_randomization
              if (hasattr(sys.flags, "hash_randomization") and
                  "no_randomization" not in python_flags)
            else 0
        ),
        "python_no_warnings" : 1 if "no_warnings" in python_flags else 0,
        "code_identifier" : code_identifier
    }

    return codes + main_code
def generateDelSliceCode(statement, emit, context):
    """Generate C code for deleting a slice of a target (Python2 only).

    Chooses between the specialized index variant, when both bounds allow
    it, and the generic object variant otherwise.
    """
    assert python_version < 300

    target = statement.getLookupSource()
    lower = statement.getLower()
    upper = statement.getUpper()

    target_name = context.allocateTempName("slicedel_target")

    generateExpressionCode(
        to_name=target_name,
        expression=target,
        emit=emit,
        context=context
    )

    if _decideSlicing(lower, upper):
        # Both bounds are usable as plain indexes.
        lower_name, upper_name = _generateSliceRangeIdentifier(
            lower=lower,
            upper=upper,
            scope="slicedel",
            emit=emit,
            context=context
        )

        # Full compatibility points errors at the bound expressions instead
        # of the whole statement.
        old_source_ref = context.setCurrentSourceCodeReference(
            (upper or lower or statement).getSourceReference()
            if Options.isFullCompat()
            else statement.getSourceReference()
        )

        _getSliceDelIndexesCode(
            target_name=target_name,
            lower_name=lower_name,
            upper_name=upper_name,
            emit=emit,
            context=context,
        )

        context.setCurrentSourceCodeReference(old_source_ref)
    else:
        # Generic variant, bounds are arbitrary expressions.
        lower_name, upper_name = generateExpressionsCode(
            names=("slicedel_lower", "slicedel_upper"),
            expressions=(lower, upper),
            emit=emit,
            context=context,
        )

        # Note: compat falls back to "target" here rather than "statement".
        old_source_ref = context.setCurrentSourceCodeReference(
            (upper or lower or target).getSourceReference()
            if Options.isFullCompat()
            else statement.getSourceReference()
        )

        _getSliceDelCode(
            target_name=target_name,
            lower_name=lower_name,
            upper_name=upper_name,
            emit=emit,
            context=context,
        )

        context.setCurrentSourceCodeReference(old_source_ref)
def _addConstantInitCode(context, emit, check, constant_type, constant_value,
                         constant_identifier, module_level):
    """ Emit code for a specific constant to be prepared during init.

        This may be module or global init. Code makes sure that nested
        constants belong into the same scope. This is just a wrapper to make
        sure that hash values become initialized for every constant too.
    """
    # Built-in types and singleton values need no creation code at all.
    if constant_value in constant_builtin_types:
        return
    if constant_value is None or constant_value is False or \
       constant_value is True or constant_value is Ellipsis:
        return

    # Do not repeat ourselves.
    if constant_identifier in done:
        return

    if Options.shallTraceExecution():
        emit("""puts("Creating constant: %s");""" % constant_identifier)

    # Then it's a real named constant not yet created.
    __addConstantInitCode(context, emit, check, constant_type, constant_value,
                          constant_identifier, module_level)

    if Options.isDebug():
        # Remember the deep hash so changes behind our back are detected.
        emit(
            """\
hash_%(constant_identifier)s = DEEP_HASH( %(constant_identifier)s );""" % {
                "constant_identifier": constant_identifier
            }
        )

        check(
            """\
CHECK_OBJECT( %(constant_identifier)s );
assert( hash_%(constant_identifier)s == DEEP_HASH( %(constant_identifier)s ) );""" % {
                "constant_identifier": constant_identifier
            }
        )

        if Options.isExperimental():
            check(
                """\
if ( hash_%(constant_identifier)s == -1 ) puts("Note: Weak hash for: %(constant_identifier)s.");""" % {
                    "constant_identifier": constant_identifier
                }
            )
def getMainCode(codes, context):
    """Return the combined C codes with the "main" program part appended.

    The main program template gets the interpreter flags captured at
    compile time, so the created binary mimics the compiling Python.
    """
    python_flags = Options.getPythonFlags()

    if context.isEmptyModule():
        code_identifier = NullIdentifier()
    else:
        # Code object of the "__main__" module itself.
        code_identifier = getCodeObjectHandle(
            context       = context,
            filename      = context.getFilename(),
            var_names     = (),
            arg_count     = 0,
            kw_only_count = 0,
            line_number   = 0,
            code_name     = "<module>",
            is_generator  = False,
            is_optimized  = False,
            has_starlist  = False,
            has_stardict  = False
        )

    # Flags not present in all Python versions default to 0 via hasattr.
    main_code = CodeTemplates.main_program % {
        # Standalone mode on Windows ships its own "python.exe".
        "sys_executable" : getConstantCode(
            constant = "python.exe"
                         if Utils.getOS() == "Windows" and
                            Options.isStandaloneMode()
                       else sys.executable,
            context  = context
        ),
        "python_sysflag_debug" : sys.flags.debug,
        "python_sysflag_py3k_warning" : (
            sys.flags.py3k_warning
              if hasattr(sys.flags, "py3k_warning")
            else 0
        ),
        "python_sysflag_division_warning" : (
            sys.flags.division_warning
              if hasattr(sys.flags, "division_warning")
            else 0
        ),
        #"python_sysflag_division_new" : sys.flags.division_new, #not supported
        "python_sysflag_inspect" : sys.flags.inspect,
        "python_sysflag_interactive" : sys.flags.interactive,
        "python_sysflag_optimize" : sys.flags.optimize,
        "python_sysflag_dont_write_bytecode" : sys.flags.dont_write_bytecode,
        "python_sysflag_no_site" : sys.flags.no_site,
        "python_sysflag_no_user_site" : sys.flags.no_user_site,
        "python_sysflag_ignore_environment" : sys.flags.ignore_environment,
        "python_sysflag_tabcheck" : (
            sys.flags.tabcheck if hasattr(sys.flags, "tabcheck") else 0
        ),
        "python_sysflag_verbose" : 1 if "trace_imports" in python_flags else 0,
        "python_sysflag_unicode" : (
            sys.flags.unicode if hasattr(sys.flags, "unicode") else 0
        ),
        "python_sysflag_bytes_warning" : sys.flags.bytes_warning,
        "python_sysflag_hash_randomization" : (
            sys.flags.hash_randomization
              if hasattr(sys.flags, "hash_randomization") and
                 "no_randomization" not in python_flags
            else 0
        ),
        "code_identifier" : code_identifier.getCodeTemporaryRef()
    }

    return codes + main_code
def runScons(options, quiet):
    """Run scons with the given options, returning True on success."""
    # For the scons file to find the static C++ files and include path. The
    # scons file is unable to use __file__ for the task.
    os.environ["NUITKA_SCONS"] = getSconsDataPath()

    if Utils.getOS() == "Windows":
        # On Windows this Scons variable must be set by us.
        os.environ["SCONS_LIB_DIR"] = Utils.joinpath(
            getSconsInlinePath(), "lib", "scons-2.3.0"
        )

        # Also, for MinGW we can avoid the user having to add the path if he
        # used the default path or installed it on the same drive by appending
        # to the PATH variable before executing scons.
        os.environ["PATH"] += r";\MinGW\bin;C:\MinGW\bin"

    scons_command = getSconsBinaryCall()

    if quiet:
        scons_command.append("--quiet")

    scons_command.extend(
        (
            # The scons file
            "-f",
            Utils.joinpath(getSconsDataPath(), "SingleExe.scons"),
            # Parallel compilation.
            "--jobs",
            str(Options.getJobLimit()),
            # Do not warn about deprecations of Scons
            "--warn=no-deprecated",
            # Don't load "site_scons" at all.
            "--no-site-dir",
        )
    )

    if Options.isShowScons():
        scons_command.append("--debug=explain")

    # Option values to provide to scons.
    for key, value in options.items():
        scons_command.append(key + "=" + value)

    if Options.isShowScons():
        Tracing.printLine("Scons command:", " ".join(scons_command))

    return 0 == subprocess.call(scons_command)
def optimizeCompiledPythonModule(module):
    """Run local optimization on one module until a fixed point is reached.

    Returns True when anything other than mere new code generation changed.
    """
    if _progress:
        printLine(
            "Doing module local optimizations for '{module_name}'.".format(
                module_name = module.getFullName()
            )
        )

    touched = False

    if _progress and Options.isShowMemory():
        memory_watch = MemoryUsage.MemoryWatch()

    while True:
        tag_set.clear()

        try:
            module.computeModule()
        except BaseException:
            info("Interrupted while working on '%s'." % module)
            raise

        Graphs.onModuleOptimizationStep(module)

        # Search for local change tags. If only "new_code" tags are present,
        # nothing of substance changed and the fixed point is reached.
        for tag in tag_set:
            if tag == "new_code":
                continue

            break
        else:
            break

        # Otherwise we did stuff, so note that for return value.
        touched = True

    if _progress and Options.isShowMemory():
        memory_watch.finish()

        printLine(
            "Memory usage changed during optimization of '%s': %s" % (
                module.getFullName(),
                memory_watch.asStr()
            )
        )

    Plugins.considerImplicitImports(module, signal_change = signalChange)

    return touched
def attemptRecursion(self, module):
    """Make sure the package containing "module" gets recursed to as well.

    Signals "new_code" when the package module was newly added.
    """
    if not Options.shallMakeModule():
        # Make sure the package is recursed to.
        module_package = module.getPackage()

        if module_package is not None:
            package_package, _package_module_name, package_filename = \
              Importing.findModule(
                source_ref     = module.getSourceReference(),
                module_name    = module_package,
                parent_package = None,
                level          = 1
            )

            imported_module, added_flag = Recursion.recurseTo(
                module_package  = package_package,
                module_filename = package_filename,
                module_relpath  = Utils.relpath(package_filename)
            )

            if added_flag:
                self.signalChange(
                    "new_code",
                    imported_module.getSourceReference(),
                    "Recursed to module package."
                )
def makeModuleFrame(module, statements, source_ref):
    """Create the frame statement wrapping a module's statement sequence."""
    assert module.isCompiledPythonModule()

    # Full compatibility uses the plain CPython name; otherwise non-main
    # modules carry their full name for nicer tracebacks.
    if Options.isFullCompat() or module.isMainModule():
        code_name = "<module>"
    else:
        code_name = "<module %s>" % module.getFullName()

    code_object = CodeObjectSpec(
        co_name=code_name,
        co_kind="Module",
        co_varnames=(),
        co_argcount=0,
        co_kwonlyargcount=0,
        co_has_starlist=False,
        co_has_stardict=False,
        co_filename=module.getRunTimeFilename(),
        co_lineno=source_ref.getLineNumber(),
        future_spec=module.getFutureSpec(),
    )

    return StatementsFrameModule(
        statements=statements,
        code_object=code_object,
        source_ref=source_ref,
    )
def getRunTimeFilename(self):
    """The filename the module reports at run time.

    Depends on the configured file reference mode: the original compile
    time path, a "<frozen ...>" marker, or a path relative to the package
    directory structure.
    """
    # TODO: Don't look at such things this late, push this into building.
    reference_mode = Options.getFileReferenceMode()

    if reference_mode == "original":
        return self.getCompileTimeFilename()

    if reference_mode == "frozen":
        return "<frozen %s>" % self.getFullName()

    # Runtime mode: rebuild a relative path from as many directory levels
    # as the dotted module name has.
    filename = self.getCompileTimeFilename()

    levels = self.getFullName().count(".")
    if self.isCompiledPythonPackage():
        levels += 1

    result = os.path.basename(filename)
    current = filename

    for _i in range(levels):
        current = os.path.dirname(current)
        result = os.path.join(os.path.basename(current), result)

    return result
def generateBytecodeFrozenCode():
    """Create the C table of frozen (bytecode) modules."""
    frozen_defs = []

    for module_name, code_data, is_package, _filename, _is_late in frozen_modules:
        # Packages are indicated with negative size.
        size = -len(code_data) if is_package else len(code_data)

        frozen_defs.append(
            """\
{{ (char *)"{module_name}", (unsigned char *){data}, {size} }},""".format(
                module_name=module_name,
                data=stream_data.getStreamDataCode(
                    value=code_data,
                    fixed_size=True
                ),
                size=size
            )
        )

        if Options.isShowInclusion():
            info("Embedded as frozen module '%s'.", module_name)

    return CodeTemplates.template_frozen_modules % {
        "frozen_modules": indented(frozen_defs)
    }
def getConstantsDeclCode(context):
    """C declarations for the context's constant variables.

    Built-in types, singletons, and single-use constants get none.
    """
    statements = []

    # Sort items by length and name, so we are deterministic and pretty.
    sorted_constants = sorted(
        iterItems(context.getConstants()),
        key=lambda item: (len(item[0]), item[0])
    )

    for constant_identifier, constant_value in sorted_constants:
        # Need not declare built-in types and singleton values.
        if constant_value in constant_builtin_types:
            continue
        if constant_value is None or constant_value is False or \
           constant_value is True or constant_value is Ellipsis:
            continue

        if context.getConstantUseCount(constant_identifier) != 1:
            statements.append("PyObject *%s;" % constant_identifier)

            if Options.isDebug():
                statements.append("Py_hash_t hash_%s;" % constant_identifier)

    return statements
def optimize(output_filename):
    """Drive the optimization passes over all registered modules."""
    Graphs.startGraph()

    # First pass.
    if _progress:
        info("PASS 1:")

    makeOptimizationPass(initial_pass=True)
    Variables.complete = True

    finished = makeOptimizationPass(initial_pass=False)

    if Options.isExperimental("check_xml_persistence"):
        _checkXMLPersistence()

    # Demote compiled modules to bytecode, now that imports had a chance to
    # be resolved, and dependencies were handled.
    for module in ModuleRegistry.getDoneUserModules():
        if module.isCompiledPythonModule() and module.mode == "bytecode":
            demoteCompiledModuleToBytecode(module)

    # Second, "endless" pass, repeated until nothing changes anymore.
    if _progress:
        info("PASS 2 ... :")

    while not finished:
        finished = makeOptimizationPass(initial_pass=False)

    Graphs.endGraph(output_filename)
def _handleFutureImport(provider, node, source_ref):
    """Process a "from __future__ import" statement.

    Only legal at module level. Enables the named future features on the
    module's future spec and remembers the node for position checks once
    the module is complete.
    """
    # Don't allow future imports in functions or classes.
    if not provider.isCompiledPythonModule():
        SyntaxErrors.raiseSyntaxError(
            reason="""\
from __future__ imports must occur at the beginning of the file""",
            col_offset=8
              if python_version >= 300 or not Options.isFullCompat()
            else None,
            source_ref=source_ref
        )

    for import_desc in node.names:
        object_name, _local_name = import_desc.name, import_desc.asname

        _enableFutureFeature(
            object_name=object_name,
            future_spec=source_ref.getFutureSpec(),
            source_ref=source_ref
        )

    # Remember it for checks to be applied once module is complete, e.g. if
    # they are all at module start.
    node.source_ref = source_ref
    _future_import_nodes.append(node)
def detectEarlyImports():
    """Determine imports that must be available before anything else runs.

    With "freeze all stdlib" that is the entire standard library, otherwise
    only the encodings and locale machinery Python needs at startup.
    """
    if Options.freezeAllStdlib():
        stdlib_modules = set()

        # Scan the standard library paths (multiple in case of virtualenv).
        for stdlib_dir in getStandardLibraryPaths():
            for module_name in scanStandardLibraryPath(stdlib_dir):
                stdlib_modules.add(module_name)

        # Import everything, tolerating modules that fail to import.
        import_code = "imports = " + repr(sorted(stdlib_modules)) + '\n'\
                      "for imp in imports:\n" \
                      "    try:\n" \
                      "        __import__(imp)\n" \
                      "    except ImportError:\n" \
                      "        pass\n"
    else:
        # TODO: Should recursively include all of encodings module.
        import_code = "import encodings.utf_8;import encodings.ascii;import encodings.idna;"

        if Utils.getOS() == "Windows":
            import_code += "import encodings.mbcs;import encodings.cp437;"

        # String method hex depends on it.
        if Utils.python_version < 300:
            import_code += "import encodings.hex_codec;"

        import_code += "import locale;"

    result = _detectImports(import_code, False)

    debug("Finished detecting early imports.")

    return result
def _generateStatementSequenceCode(statement_sequence, emit, context):
    """Generate code for every statement of a sequence, tracing optionally."""
    if statement_sequence is None:
        return

    for statement in statement_sequence.getStatements():
        if Options.shallTraceExecution():
            source_ref = statement.getSourceReference()

            statement_repr = repr(statement)
            source_repr = source_ref.getAsString()

            # Tracing code works on byte strings under Python3.
            if python_version >= 300:
                statement_repr = statement_repr.encode("utf8")
                source_repr = source_repr.encode("utf8")

            emit(getStatementTrace(source_repr, statement_repr))

        # Might contain frame statement sequences as children.
        if statement.isStatementsFrame():
            generateStatementsFrameCode(
                statement_sequence=statement,
                emit=emit,
                context=context
            )
        else:
            generateStatementCode(
                statement=statement,
                emit=emit,
                context=context
            )
def generateBytecodeFrozenCode():
    """Create the C table of frozen modules from the uncompiled modules."""
    frozen_defs = []

    for uncompiled_module in getUncompiledModules():
        module_name = uncompiled_module.getFullName()
        code_data = uncompiled_module.getByteCode()
        is_package = uncompiled_module.isPackage()

        # Packages are indicated with negative size.
        size = -len(code_data) if is_package else len(code_data)

        frozen_defs.append(
            """\
{{ (char *)"{module_name}", (unsigned char *){data}, {size} }},""".format(
                module_name=module_name,
                data=stream_data.getStreamDataCode(
                    value=code_data,
                    fixed_size=True
                ),
                size=size
            )
        )

        if Options.isShowInclusion():
            info("Embedded as frozen module '%s'.", module_name)

    return template_frozen_modules % {
        "frozen_modules": indented(frozen_defs, 2)
    }
def computeExpression(self, constraint_collection):
    """Attempt compile time prediction of the next iteration value.

    Currently disabled via the unconditional early return below, until the
    code is considered safe to enable.
    """
    # TODO: Enable below code once safer.
    if True or not Options.isExperimental():
        return self, None, None

    # NOTE: everything below is currently unreachable.
    target = self.getValue().getValueFriend(constraint_collection)

    if target.isKnownToBeIterableAtMin(1, constraint_collection):
        value = target.getIterationNext(constraint_collection)

        if value is not None:
            if value.isNode() and not self.parent.isStatementExpressionOnly():
                # As a side effect, keep the iteration, later checks may depend on it,
                # or if absent, optimizations will remove it.
                if not self.parent.isExpressionSideEffects():
                    value = ExpressionSideEffects(
                        expression = value.makeCloneAt(
                            source_ref = self.getSourceReference()
                        ),
                        side_effects = (
                            self.makeCloneAt(
                                source_ref = self.getSourceReference()
                            ),
                        ),
                        source_ref = self.getSourceReference()
                    )

                return value, "new_expression", "Predicted next iteration result"
        else:
            assert False, target

    return self, None, None
def _readSourceCodeFromFilename3(source_filename): import tokenize try: with open(source_filename, "rb") as source_file: encoding = tokenize.detect_encoding(source_file.readline)[0] # @UndefinedVariable # Rewind to get the whole file. source_file.seek(0) source_code = source_file.read() return source_code.decode(encoding) except SyntaxError as e: if Options.isFullCompat(): if PythonVersions.doShowUnknownEncodingName(): match = re.match("unknown encoding for '.*?': (.*)", e.args[0]) complaint = match.group(1) else: complaint = "with BOM" e.args = ( "encoding problem: %s" % complaint, (source_filename, 1, None, None) ) if hasattr(e, "msg"): e.msg = e.args[0] raise
def getExceptionKeeperVariableNames(keeper_index):
    """C declarations of the exception keeper variables for one index.

    For finally handlers of Python3, which have conditions on assign and
    use; debug mode initializes them so misuse is detectable.
    """
    debug = Options.isDebug() and python_version >= 300

    obj_init = " = NULL" if debug else ""
    lineno_init = " = -1" if debug else ""

    return (
        "PyObject *exception_keeper_type_%d%s;" % (keeper_index, obj_init),
        "PyObject *exception_keeper_value_%d%s;" % (keeper_index, obj_init),
        "PyTracebackObject *exception_keeper_tb_%d%s;" % (keeper_index, obj_init),
        "NUITKA_MAY_BE_UNUSED int exception_keeper_lineno_%d%s;" % (
            keeper_index,
            lineno_init
        )
    )
def isRelevant():
    """Only relevant when building in standalone mode."""
    return Options.isStandaloneMode()
def copyUsedDLLs(dist_dir, standalone_entry_points):
    """Copy the DLLs used by the standalone binaries into the dist folder.

    Detects collisions of same-named DLLs, dropping binary-identical
    duplicates and aborting on real conflicts, then applies OS specific
    path fixups to the copied libraries.
    """
    # This is terribly complex, because we check the list of used DLLs
    # trying to avoid duplicates, and detecting errors with them not
    # being binary identical, so we can report them. And then of course
    # we also need to handle OS specifics, pylint: disable=R0912

    dll_map = []

    used_dlls = detectUsedDLLs(standalone_entry_points)

    # Compare every pair of used DLLs; iterate over snapshots since
    # entries may get deleted during the scan.
    for dll_filename1, sources1 in tuple(iterItems(used_dlls)):
        for dll_filename2, sources2 in tuple(iterItems(used_dlls)):
            if dll_filename1 == dll_filename2:
                continue

            # Colliding basenames are an issue to us.
            if Utils.basename(dll_filename1) != Utils.basename(dll_filename2):
                continue

            # May already have been removed earlier
            if dll_filename1 not in used_dlls:
                continue

            if dll_filename2 not in used_dlls:
                continue

            dll_name = Utils.basename(dll_filename1)

            if Options.isShowInclusion():
                info("""Colliding DLL names for %s, checking identity of \
'%s' <-> '%s'.""" % (
                    dll_name,
                    dll_filename1,
                    dll_filename2,
                ))

            # Check that if a DLL has the same name, if it's identical,
            # happens at least for OSC and Fedora 20.
            import filecmp
            if filecmp.cmp(dll_filename1, dll_filename2):
                del used_dlls[dll_filename2]
                continue

            # So we have conflicting DLLs, in which case we do not proceed.
            sys.exit("""Error, conflicting DLLs for '%s'.
%s used by:
%s

different from
%s used by
%s""" % (
                dll_name,
                dll_filename1,
                "\n ".join(sources1),
                dll_filename2,
                "\n ".join(sources2)
            ))

    # Copy the surviving DLLs next to the binaries.
    for dll_filename, sources in iterItems(used_dlls):
        dll_name = Utils.basename(dll_filename)

        target_path = Utils.joinpath(dist_dir, dll_name)

        shutil.copy(dll_filename, target_path)

        dll_map.append((dll_filename, dll_name))

        if Options.isShowInclusion():
            info(
                "Included used shared library '%s' (used by %s)." % (
                    dll_filename,
                    ", ".join(sources)
                )
            )

    if Utils.getOS() == "Darwin":
        # For MacOS, the binary and the DLLs needs to be changed to reflect
        # the relative DLL location in the ".dist" folder.
        for standalone_entry_point in standalone_entry_points:
            fixupBinaryDLLPaths(
                binary_filename=standalone_entry_point[0],
                is_exe=standalone_entry_point is standalone_entry_points[0],
                dll_map=dll_map
            )

        for _original_path, dll_filename in dll_map:
            fixupBinaryDLLPaths(
                binary_filename=Utils.joinpath(dist_dir, dll_filename),
                is_exe=False,
                dll_map=dll_map
            )

    if Utils.getOS() == "Linux":
        # For Linux, the "rpath" of libraries may be an issue and must be
        # removed.
        for _original_path, dll_filename in dll_map:
            removeSharedLibraryRPATH(Utils.joinpath(dist_dir, dll_filename))

        for standalone_entry_point in standalone_entry_points[1:]:
            removeSharedLibraryRPATH(standalone_entry_point[0])
def _getConstantDefaultPopulation():
    """Return the tuple of constant values every compiled program needs.

    Extended per Python version and per compilation mode (module vs.
    executable).
    """
    result = (
        # Basic values that the helper code uses all the times.
        (),
        {},
        "",
        True,
        False,
        0,
        1,

        # For Python3 empty bytes, no effect for Python2, same as "", used for
        # code objects.
        b"",

        # Python mechanics, used in various helpers.
        "__module__",
        "__class__",
        "__name__",
        "__metaclass__",
        "__dict__",
        "__doc__",
        "__file__",
        "__enter__",
        "__exit__",
        "__builtins__",
        "__all__",
        "__cmp__",
        "__iter__",

        # Patched module name.
        "inspect",

        # Names of builtins used in helper code.
        "compile",
        "range",
        "open",
        "__import__",
    )

    # For Python3 modules
    if python_version >= 300:
        result += (
            "__cached__",
        )

    # For Python3 print
    if python_version >= 300:
        result += (
            "print",
            "end",
            "file",
        )

    if python_version >= 330:
        result += (
            # Modules have that attribute.
            "__loader__",
        )

    if python_version >= 340:
        result += (
            # YIELD_FROM uses this starting 3.4, with 3.3 other code is used.
            "send",
        )

    if python_version >= 330:
        result += (
            # YIELD_FROM uses this
            "throw",
            "close",
        )

    # For patching Python2 internal class type
    if python_version < 300:
        result += (
            "__getattr__",
            "__setattr__",
            "__delattr__",
        )

    # For patching Python2 sys attributes for current exception
    if python_version < 300:
        result += (
            "exc_type",
            "exc_value",
            "exc_traceback"
        )

    # The xrange built-in is Python2 only.
    if python_version < 300:
        result += (
            "xrange",
        )

    # Executables only
    if not Options.shallMakeModule():
        result += (
            "__main__",
        )

    # The "site" module is referenced in inspect patching.
    result += (
        "site",
    )

    # Builtin original values
    if not Options.shallMakeModule():
        result += (
            "type",
            "len",
            "range",
            "repr",
            "int",
            "iter",
        )

        if python_version < 300:
            result += (
                "long",
            )

    # Disabling warnings at startup
    if "no_warnings" in Options.getPythonFlags():
        result += (
            "ignore",
        )

    return result
# add your class here. The second one is a detector, which is supposed to give # a missing plug-in message, should it find the condition to make it useful. optional_plugin_classes = ( (NuitkaPluginMultiprocessingWorkaorunds, NuitkaPluginDetectorMultiprocessingWorkaorunds), (NuitkaPluginPyQtPySidePlugins, NuitkaPluginDetectorPyQtPySidePlugins), (NuitkaPluginPylintEclipseAnnotations, NuitkaPluginDetectorPylintEclipseAnnotations), (NuitkaPluginDataFileCollector, NuitkaPluginDetectorDataFileCollector), (NuitkaPluginPmw, NuitkaPluginDetectorPmw), ) plugin_name2plugin_classes = dict( (plugin[0].plugin_name, plugin) for plugin in optional_plugin_classes) for plugin_name in Options.getPluginsEnabled() + Options.getPluginsDisabled(): if plugin_name not in plugin_name2plugin_classes: sys.exit("Error, unknown plug-in '%s' referenced." % plugin_name) if plugin_name in Options.getPluginsEnabled() and \ plugin_name in Options.getPluginsDisabled(): sys.exit("Error, conflicting enable/disable of plug-in '%s'." % plugin_name) for plugin_name, (plugin_class, plugin_detector) in plugin_name2plugin_classes.items(): if plugin_name in Options.getPluginsEnabled(): active_plugin_list.append( plugin_class(**Options.getPluginOptions(plugin_name))) elif plugin_name not in Options.getPluginsDisabled(): if plugin_detector is not None \
def _addConstantInitCode(
    context,
    emit,
    check,
    constant_type,
    constant_value,
    constant_identifier,
    module_level,
):
    """ Emit code for a specific constant to be prepared during init.

        This may be module or global init. Code makes sure that nested
        constants belong into the same scope.
    """
    # Got a couple of values to dodge, pylint: disable=too-many-return-statements

    # Singletons need no creation code at all; identity checks on purpose,
    # "==" would wrongly match e.g. 0 against False.
    for singleton_value in (None, False, True, Ellipsis, NotImplemented):
        if constant_value is singleton_value:
            return

    # Built-in type objects are available without creation too.
    if type(constant_value) is type:
        return

    # Do not repeat ourselves.
    if constant_identifier in done:
        return

    if Options.shallTraceExecution():
        emit("""NUITKA_PRINT_TRACE("Creating constant: %s");""" % constant_identifier)

    # Then it's a real named constant not yet created.
    __addConstantInitCode(
        context,
        emit,
        check,
        constant_type,
        constant_value,
        constant_identifier,
        module_level,
    )

    # In debug mode, lets check if the constants somehow change behind our
    # back, add those values too.
    if Options.isDebug():
        emit(
            """\
hash_%(constant_identifier)s = DEEP_HASH( %(constant_identifier)s );"""
            % {"constant_identifier": constant_identifier}
        )

        check(
            """\
CHECK_OBJECT( %(constant_identifier)s );
assert( hash_%(constant_identifier)s == DEEP_HASH( %(constant_identifier)s ) );"""
            % {"constant_identifier": constant_identifier}
        )
def getConstantAccess(to_name, constant, emit, context):
    """Emit code making "to_name" refer to the given constant value.

    Mutable constants (dict, set, list, bytearray, and tuples containing
    mutables) are copied, so user code cannot corrupt the cached constant
    object; immutable ones are referenced directly without a new reference
    (ref_count 0).
    """
    # Many cases, because for each type, we may copy or optimize by creating
    # empty.  pylint: disable=too-many-branches,too-many-statements

    if to_name.c_type == "nuitka_bool" and Options.isDebug():
        info("Missing optimization for constant to C bool.")

    if type(constant) is dict:
        if constant:
            # Shallow copy suffices unless a value is itself mutable.
            for key, value in iterItems(constant):
                # key cannot be mutable.
                assert not isMutable(key)
                if isMutable(value):
                    needs_deep = True
                    break
            else:
                needs_deep = False

            if needs_deep:
                code = "DEEP_COPY( %s )" % context.getConstantCode(constant)
            else:
                code = "PyDict_Copy( %s )" % context.getConstantCode(constant)
        else:
            code = "PyDict_New()"

        ref_count = 1
    elif type(constant) is set:
        if constant:
            code = "PySet_New( %s )" % context.getConstantCode(constant)
        else:
            code = "PySet_New( NULL )"

        ref_count = 1
    elif type(constant) is list:
        if constant:
            for value in constant:
                if isMutable(value):
                    needs_deep = True
                    break
            else:
                needs_deep = False

            if needs_deep:
                code = "DEEP_COPY( %s )" % context.getConstantCode(constant)
            else:
                code = "LIST_COPY( %s )" % context.getConstantCode(constant)
        else:
            code = "PyList_New( 0 )"

        ref_count = 1
    elif type(constant) is tuple:
        # Tuples are immutable, only a deep copy is ever needed, and only
        # when an element is mutable.
        for value in constant:
            if isMutable(value):
                needs_deep = True
                break
        else:
            needs_deep = False

        if needs_deep:
            code = "DEEP_COPY( %s )" % context.getConstantCode(constant)

            ref_count = 1
        else:
            code = context.getConstantCode(constant)

            ref_count = 0
    elif type(constant) is bytearray:
        code = "BYTEARRAY_COPY( %s )" % context.getConstantCode(constant)
        ref_count = 1
    else:
        # Everything else is immutable, use the cached constant directly.
        code = context.getConstantCode(constant=constant)

        ref_count = 0

    if to_name.c_type == "PyObject *":
        value_name = to_name
    else:
        value_name = context.allocateTempName("constant_value")

    emit("%s = %s;" % (value_name, code))

    if ref_count:
        # Copies are owned references and need cleanup registration.
        context.addCleanupTempName(value_name)

    if to_name is not value_name:
        # Convert to the target C type when a temporary was needed.
        to_name.getCType().emitAssignConversionCode(
            to_name=to_name,
            value_name=value_name,
            needs_check=False,
            emit=emit,
            context=context,
        )
def _addWindowsIconFromIcons(onefile):
    """Attach user provided icon files as Windows resources to the result binary.

    Non-ico inputs are converted via "imageio" (optional dependency) first.
    Adds one RT_GROUP_ICON resource plus one RT_ICON resource per image.
    """
    # Relatively detailed handling, pylint: disable=too-many-locals

    icon_group = 1
    image_id = 1
    images = []

    result_filename = OutputDirectories.getResultFullpath(onefile=onefile)

    for icon_spec in Options.getIconPaths():
        # A "#" suffix selects a single icon index inside the file.
        if "#" in icon_spec:
            icon_path, icon_index = icon_spec.rsplit("#", 1)
            icon_index = int(icon_index)
        else:
            icon_path = icon_spec
            icon_index = None

        icon_path = os.path.normcase(icon_path)

        if not icon_path.endswith(".ico"):
            postprocessing_logger.info("Not in icon format, converting.")

            if icon_index is not None:
                postprocessing_logger.sysexit(
                    "Cannot specify indexes with non-ico format files in '%s'."
                    % icon_spec)

            try:
                import imageio
            except ImportError:
                postprocessing_logger.sysexit(
                    "Need to install imageio to use non-ico icon file in '%s'."
                    % icon_spec)

            try:
                image = imageio.imread(icon_path)
            except ValueError:
                postprocessing_logger.sysexit(
                    "Unsupported file format for imageio in '%s', use e.g. PNG files."
                    % icon_spec)

            # Converted icons land in the build directory.
            icon_build_path = os.path.join(
                OutputDirectories.getSourceDirectoryPath(onefile=onefile),
                "icons",
            )
            makePath(icon_build_path)
            converted_icon_path = os.path.join(
                icon_build_path,
                "icon-%d.ico" % image_id,
            )

            imageio.imwrite(converted_icon_path, image)
            icon_path = converted_icon_path

        with open(icon_path, "rb") as icon_file:
            # Read header and icon entries.
            header = readFromFile(icon_file, IconDirectoryHeader)
            icons = [
                readFromFile(icon_file, IconDirectoryEntry)
                for _i in range(header.count)
            ]

            if icon_index is not None:
                if icon_index > len(icons):
                    postprocessing_logger.sysexit(
                        "Error, referenced icon index %d in file '%s' with only %d icons."
                        % (icon_index, icon_path, len(icons)))

                icons[:] = icons[icon_index:icon_index + 1]

            postprocessing_logger.info(
                "Adding %d icon(s) from icon file '%s'."
                % (len(icons), icon_spec))

            # Image data are to be scanned from places specified icon entries
            for icon in icons:
                icon_file.seek(icon.image_offset, 0)
                images.append(icon_file.read(icon.image_size))

        # Build the RT_GROUP_ICON directory from the last read header/icons.
        # NOTE(review): "parts" is rebuilt each loop iteration; with multiple
        # icon files only the last group directory survives — confirm intended.
        parts = [convertStructureToBytes(header)]

        for icon in icons:
            parts.append(
                convertStructureToBytes(
                    IconGroupDirectoryEntry(
                        width=icon.width,
                        height=icon.height,
                        colors=icon.colors,
                        reserved=icon.reserved,
                        planes=icon.planes,
                        bit_count=icon.bit_count,
                        image_size=icon.image_size,
                        id=image_id,
                    )))

            image_id += 1

    addResourceToFile(
        target_filename=result_filename,
        data=b"".join(parts),
        resource_kind=RT_GROUP_ICON,
        lang_id=0,
        res_name=icon_group,
        logger=postprocessing_logger,
    )

    # One RT_ICON resource per image, numbered from 1 as referenced by the
    # group directory entries.
    for count, image in enumerate(images, 1):
        addResourceToFile(
            target_filename=result_filename,
            data=image,
            resource_kind=RT_ICON,
            lang_id=0,
            res_name=count,
            logger=postprocessing_logger,
        )
def executePostProcessingResources(manifest, onefile):
    """Adding Windows resources to the binary.

    Used for both onefile and not onefile binary, potentially two times.

    Args:
        manifest: Windows executable manifest to attach, or None; a default
            manifest is created if UAC options require one.
        onefile: Whether this is the onefile binary.

    Fix: the success report after copying icon resources from a template
    executable was logged at "warning" level; it is an informational
    message, now logged via "info" consistent with other progress output.
    """
    result_filename = OutputDirectories.getResultFullpath(onefile=onefile)

    # TODO: Maybe make these different for onefile and not onefile.
    if (Options.shallAskForWindowsAdminRights()
            or Options.shallAskForWindowsUIAccessRights()):
        if manifest is None:
            manifest = getDefaultWindowsExecutableManifest()

        if Options.shallAskForWindowsAdminRights():
            manifest.addUacAdmin()

        if Options.shallAskForWindowsUIAccessRights():
            manifest.addUacUiAccess()

    if manifest is not None:
        manifest.addResourceToFile(result_filename, logger=postprocessing_logger)

    if (Options.getWindowsVersionInfoStrings()
            or Options.getWindowsProductVersion()
            or Options.getWindowsFileVersion()):
        version_resources.update(
            addVersionInfoResource(
                string_values=Options.getWindowsVersionInfoStrings(),
                product_version=Options.getWindowsProductVersion(),
                file_version=Options.getWindowsFileVersion(),
                file_date=(0, 0),
                is_exe=not Options.shallMakeModule(),
                result_filename=result_filename,
                logger=postprocessing_logger,
            ))

    # Attach icons from template file if given.
    template_exe = Options.getWindowsIconExecutablePath()
    if template_exe is not None:
        res_copied = copyResourcesFromFileToFile(
            template_exe,
            target_filename=result_filename,
            resource_kinds=(RT_ICON, RT_GROUP_ICON),
        )

        if res_copied == 0:
            postprocessing_logger.warning(
                "The specified icon template executable %r didn't contain anything to copy."
                % template_exe)
        else:
            # Success report, informational only.
            postprocessing_logger.info("Copied %d icon resources from %r." %
                                       (res_copied, template_exe))
    else:
        _addWindowsIconFromIcons(onefile=onefile)
""" Control the flow of optimizations applied to node tree. Applies constraint collection on all so far known modules until no more optimization is possible. Every successful optimization to anything might make others possible. """ from logging import debug from nuitka import ModuleRegistry, Options, Utils from nuitka.Tracing import printLine from .ConstraintCollections import ConstraintCollectionModule from .Tags import TagSet _progress = Options.isShowProgress() def _attemptRecursion(module): new_modules = module.attemptRecursion() for new_module in new_modules: debug("{source_ref} : {tags} : {message}".format( source_ref=new_module.getSourceReference().getAsString(), tags="new_code", message="Recursed to module package.")) tag_set = None
def executePostProcessing():
    """Postprocessing of the resulting binary.

    These are in part required steps, not usable after failure.
    """

    result_filename = OutputDirectories.getResultFullpath(onefile=False)

    if not os.path.exists(result_filename):
        postprocessing_logger.sysexit(
            "Error, scons failed to create the expected file %r. " %
            result_filename)

    if isWin32Windows():
        if not Options.shallMakeModule():
            if python_version < 0x300:
                # Copy the Windows manifest from the CPython binary to the created
                # executable, so it finds "MSCRT.DLL". This is needed for Python2
                # only, for Python3 newer MSVC doesn't hide the C runtime.
                manifest = getWindowsExecutableManifest(sys.executable)
            else:
                manifest = None

            executePostProcessingResources(manifest=manifest, onefile=False)

        source_dir = OutputDirectories.getSourceDirectoryPath()

        # Attach the binary blob as a Windows resource.
        addResourceToFile(
            target_filename=result_filename,
            data=getFileContents(getConstantBlobFilename(source_dir), "rb"),
            resource_kind=RT_RCDATA,
            res_name=3,
            lang_id=0,
            logger=postprocessing_logger,
        )

    # On macOS, we update the executable path for searching the "libpython"
    # library.
    if (getOS() == "Darwin" and not Options.shallMakeModule()
            and not Options.shallUseStaticLibPython()):
        python_abi_version = python_version_str + getPythonABI()
        python_dll_filename = "libpython" + python_abi_version + ".dylib"
        python_lib_path = os.path.join(sys.prefix, "lib")

        # Note: For CPython and potentially others, the rpath for the Python
        # library needs to be set.
        callInstallNameTool(
            filename=result_filename,
            mapping=(
                (
                    python_dll_filename,
                    os.path.join(python_lib_path, python_dll_filename),
                ),
                (
                    "@rpath/Python3.framework/Versions/%s/Python3" % python_version_str,
                    os.path.join(python_lib_path, python_dll_filename),
                ),
            ),
            rpath=python_lib_path,
        )

    # Modules should not be executable, but Scons creates them like it, fix
    # it up here.
    if not isWin32Windows() and Options.shallMakeModule():
        removeFileExecutablePermission(result_filename)

    # Remove the import library that the MinGW/MSVC link step leaves next to
    # a built module, it is not needed.
    if isWin32Windows() and Options.shallMakeModule():
        candidate = os.path.join(
            os.path.dirname(result_filename),
            "lib" + os.path.basename(result_filename)[:-4] + ".a",
        )

        if os.path.exists(candidate):
            os.unlink(candidate)

    # Make the Python DLL available next to the binary for uninstalled
    # Python versions.
    if isWin32Windows() and Options.shallTreatUninstalledPython():
        shutil.copy(getTargetPythonDLLPath(),
                    os.path.dirname(result_filename) or ".")
def _detectEarlyImports():
    """Detect modules the CPython interpreter needs at startup.

    Probes the encodings package, locale, and (Python3) inspect machinery
    by running import statements in a child interpreter; optionally also
    scans and imports the whole standard library when freezing it all.
    """
    encoding_names = [
        m[1] for m in pkgutil.iter_modules(sys.modules["encodings"].__path__)
    ]

    if os.name != "nt":
        # On posix systems, and posix Python variants on Windows, these won't
        # work and fail to import.
        for encoding_name in ("mbcs", "cp65001", "oem"):
            if encoding_name in encoding_names:
                encoding_names.remove(encoding_name)

    # Not for startup.
    for non_locale_encoding in (
            "bz2_codec",
            "idna",
            "base64_codec",
            "hex_codec",
            "rot_13",
    ):
        if non_locale_encoding in encoding_names:
            encoding_names.remove(non_locale_encoding)

    import_code = ";".join("import encodings.%s" % encoding_name
                           for encoding_name in sorted(encoding_names))

    import_code += ";import locale;"

    # For Python3 we patch inspect without knowing if it is used.
    if python_version >= 0x300:
        import_code += "import inspect;import importlib._bootstrap"

    result = _detectImports(command=import_code,
                            user_provided=False,
                            technical=True)

    if Options.shallFreezeAllStdlib():
        stdlib_modules = set()

        # Scan the standard library paths (multiple in case of virtualenv).
        for stdlib_dir in getStandardLibraryPaths():
            for module_name in scanStandardLibraryPath(stdlib_dir):
                if not isStandardLibraryNoAutoInclusionModule(module_name):
                    stdlib_modules.add(module_name)

        # Put here ones that should be imported first.
        first_ones = ("Tkinter", )

        # We have to fight zombie modules in this, some things, e.g. Tkinter
        # on newer Python 2.7, comes back after failure without a second error
        # being raised, leading to other issues. So we fight it after each
        # module that was tried, and prevent re-try by adding a meta path
        # based loader that will never load it again, and remove it from the
        # "sys.modules" over and over, once it sneaks back. The root cause is
        # that extension modules sometimes only raise an error when first
        # imported, not the second time around.
        # Otherwise this just makes imports of everything so we can see where
        # it comes from and what it requires.

        import_code = """
imports = %r

failed = set()

class ImportBlocker(object):
    def find_module(self, fullname, path = None):
        if fullname in failed:
            return self

        return None

    def load_module(self, name):
        raise ImportError("%%s has failed before" %% name)

sys.meta_path.insert(0, ImportBlocker())

for imp in imports:
    try:
        __import__(imp)
    except (ImportError, SyntaxError):
        failed.add(imp)
    except ValueError as e:
        if "cannot contain null bytes" in e.args[0]:
            failed.add(imp)
        else:
            sys.stderr.write("PROBLEM with '%%s'\\n" %% imp)
            raise
    except Exception:
        sys.stderr.write("PROBLEM with '%%s'\\n" %% imp)
        raise

for fail in failed:
    if fail in sys.modules:
        del sys.modules[fail]
""" % (tuple(module_name.asString()
             for module_name in sorted(stdlib_modules,
                                       key=lambda name:
                                       (name not in first_ones, name))), )

        early_names = [module.getFullName() for module in result]

        # Only add modules not already found by the technical probe above.
        result += [
            module for module in _detectImports(
                command=import_code, user_provided=False, technical=False)
            if module.getFullName() not in early_names
        ]

    return result
def copyDllsUsed(source_dir, dist_dir, standalone_entry_points):
    """Detect, de-duplicate and copy all DLLs used into the dist folder.

    Also applies per-OS fixups: install names on macOS, rpath settings on
    Linux and macOS, code signing on macOS, and notifies plugins afterwards.
    """
    # This is complex, because we also need to handle OS specifics.

    used_dlls = _detectUsedDLLs(
        source_dir=source_dir,
        standalone_entry_points=standalone_entry_points,
        # Depends.exe results are not cached, only the other tools.
        use_cache=not Options.shallNotUseDependsExeCachedResults()
        and Options.getWindowsDependencyTool() != "depends.exe",
        update_cache=not Options.shallNotStoreDependsExeCachedResults()
        and Options.getWindowsDependencyTool() != "depends.exe",
    )

    duplicate_dlls = _removeDuplicateDlls(used_dlls=used_dlls)

    dll_map = _copyDllsUsed(dist_dir=dist_dir, used_dlls=used_dlls)

    # TODO: This belongs inside _copyDllsUsed
    if Utils.isMacOS():
        # For macOS, the binary and the DLLs needs to be changed to reflect
        # the relative DLL location in the ".dist" folder.
        for standalone_entry_point in standalone_entry_points:
            _fixupBinaryDLLPathsMacOS(
                binary_filename=standalone_entry_point.dest_path,
                package_name=standalone_entry_point.package_name,
                dll_map=dll_map,
                duplicate_dlls=duplicate_dlls,
                original_location=standalone_entry_point.source_path,
            )

        for original_path, package_name, dll_filename in dll_map:
            _fixupBinaryDLLPathsMacOS(
                binary_filename=os.path.join(dist_dir, dll_filename),
                package_name=package_name,
                dll_map=dll_map,
                duplicate_dlls=duplicate_dlls,
                original_location=original_path,
            )

    # Remove or update rpath settings.
    if Utils.getOS() == "Linux":
        # For Linux, the "rpath" of libraries may be an issue and must be
        # removed.
        for standalone_entry_point in standalone_entry_points[1:]:
            # Make the rpath point back to the dist dir, relative to where
            # the entry point was copied to.
            count = relpath(path=standalone_entry_point.dest_path,
                            start=dist_dir).count(os.path.sep)

            rpath = os.path.join("$ORIGIN", *([".."] * count))
            setSharedLibraryRPATH(standalone_entry_point.dest_path, rpath)

        for _original_path, _package_name, dll_filename in dll_map:
            setSharedLibraryRPATH(os.path.join(dist_dir, dll_filename),
                                  "$ORIGIN")

    if Utils.isMacOS():
        setSharedLibraryRPATH(standalone_entry_points[0].dest_path, "$ORIGIN")

        # Sign everything that was produced, binaries and DLLs alike.
        addMacOSCodeSignature(filenames=[
            standalone_entry_point.dest_path
            for standalone_entry_point in standalone_entry_points
        ] + [
            os.path.join(dist_dir, dll_filename)
            for _original_path, _package_name, dll_filename in dll_map
        ])

    Plugins.onCopiedDLLs(dist_dir=dist_dir, used_dlls=used_dlls)
def decideRecursion(module_filename, module_name, module_package,
                    module_kind):
    """Decide whether to recurse into the given module.

    Returns:
        Tuple (decision, reason) where decision is True to recurse, False
        to not recurse, or None when no instruction applies; reason is a
        human readable explanation string.

    Fix: corrected the typo "explicitely" -> "explicitly" in the four
    user-visible reason strings, and built "full_name" without the
    redundant branch duplication.
    """
    # Many branches, which make decisions immediately, by returning
    # pylint: disable=R0911,R0912

    if module_kind == "shlib":
        if Options.isStandaloneMode():
            return True, "Shared library for inclusion."
        else:
            return False, "Shared library cannot be inspected."

    # Qualified name, with the package prepended when there is one.
    if module_package is None:
        full_name = module_name
    else:
        full_name = module_package + "." + module_name

    if isFrozenModule(full_name):
        return False, "Module is frozen."

    # Explicit "do not follow" options take precedence.
    for no_case_module in Options.getShallFollowInNoCase():
        if full_name == no_case_module:
            return (
                False,
                "Module listed explicitly to not recurse to."
            )

        if full_name.startswith(no_case_module + "."):
            return (
                False,
                "Module in package listed explicitly to not recurse to."
            )

    # Then explicit "follow" options.
    for any_case_module in Options.getShallFollowModules():
        if full_name == any_case_module:
            return (
                True,
                "Module listed explicitly to recurse to."
            )

        if full_name.startswith(any_case_module + "."):
            return (
                True,
                "Module in package listed explicitly to recurse to."
            )

    if Options.shallFollowNoImports():
        return (
            False,
            "Requested to not recurse at all."
        )

    if Importing.isStandardLibraryPath(module_filename):
        return (
            Options.shallFollowStandardLibrary(),
            "Requested to %srecurse to standard library." % (
                "" if Options.shallFollowStandardLibrary() else "not "
            )
        )

    if Options.shallFollowAllImports():
        return (
            True,
            "Requested to recurse to all non-standard library modules."
        )

    # Means, we were not given instructions how to handle things.
    return (
        None,
        "Default behaviour, not recursing without request."
    )
    progress_logger,
    recursion_logger,
)
from nuitka.utils.AppDirs import getCacheDir
from nuitka.utils.FileOperations import makePath
from nuitka.utils.MemoryUsage import (
    MemoryWatch,
    getHumanReadableProcessMemoryUsage,
)

from . import Graphs
from .BytecodeDemotion import demoteCompiledModuleToBytecode
from .Tags import TagSet
from .TraceCollections import withChangeIndicationsTo

# Cached option flags, avoiding repeated lookups during optimization passes.
_progress = Options.isShowProgress()
_is_verbose = Options.isVerbose()


def _attemptRecursion(module):
    # Ask the module to recurse into the modules it imports; newly found
    # modules are only reported here when inclusion tracing is enabled.
    new_modules = module.attemptRecursion()

    if Options.isShowInclusion():
        for new_module in new_modules:
            recursion_logger.info("{source_ref} : {tags} : {message}".format(
                source_ref=new_module.getSourceReference().getAsString(),
                tags="new_code",
                message="Recursed to module package.",
            ))
def _detectedSourceFile(filename, module_name, result, user_provided,
                        technical):
    """Freeze a detected source file into an uncompiled bytecode module.

    Compiles the source (after optional patching of "site" and plugin
    hooks), registers the uncompiled module in the import cache and appends
    it to "result". No-op for already seen module names.
    """
    if module_name in module_names:
        return

    if module_name == "collections.abc":
        # It is imported under this alias name as well, freeze that too.
        _detectedSourceFile(
            filename=filename,
            module_name=ModuleName("_collections_abc"),
            result=result,
            user_provided=user_provided,
            technical=technical,
        )

    source_code = readSourceCodeFromFilename(module_name, filename)

    if module_name == "site":
        # Prepend a "__file__" assignment; make room so the injected line
        # cannot glue onto a leading def/class statement.
        if source_code.startswith("def ") or source_code.startswith("class "):
            source_code = "\n" + source_code

        source_code = """\
__file__ = (__nuitka_binary_dir + '%s%s') if '__nuitka_binary_dir' in dict(__builtins__ ) else '<frozen>';%s""" % (
            os.path.sep,
            os.path.basename(filename),
            source_code,
        )

        # Debian stretch site.py
        source_code = source_code.replace(
            "PREFIXES = [sys.prefix, sys.exec_prefix]", "PREFIXES = []")

    if Options.isShowInclusion():
        inclusion_logger.info("Freezing module '%s' (from '%s')." %
                              (module_name, filename))

    is_package = os.path.basename(filename) == "__init__.py"

    # Plugins can modify source code:
    source_code = Plugins.onFrozenModuleSourceCode(module_name=module_name,
                                                   is_package=is_package,
                                                   source_code=source_code)

    bytecode = compileSourceToBytecode(
        source_code=source_code,
        filename=module_name.replace(".", os.path.sep) + ".py",
    )

    # Plugins can modify bytecode code:
    bytecode = Plugins.onFrozenModuleBytecode(module_name=module_name,
                                              is_package=is_package,
                                              bytecode=bytecode)

    uncompiled_module = makeUncompiledPythonModule(
        module_name=module_name,
        bytecode=marshal.dumps(bytecode),
        is_package=is_package,
        filename=filename,
        user_provided=user_provided,
        technical=technical,
    )

    ImportCache.addImportedModule(uncompiled_module)

    result.append(uncompiled_module)
    module_names.add(module_name)
def onEnterNode(self, node):
    """Mark nodes with finalization flags while walking the tree.

    Propagates exception-driven flags for break/continue/return through
    enclosing try/finally blocks, marks locals-dict needs, reraise kinds,
    frame exception preservation, and function creation/call usage.
    """
    # This has many different things it deals with, so there need to be a
    # lot of branches and statements, pylint: disable=R0912,R0915

    if node.isExpressionFunctionBody():
        if node.isUnoptimized():
            node.markAsLocalsDict()

    if node.needsLocalsDict():
        provider = node.getParentVariableProvider()

        if provider.isExpressionFunctionBody():
            provider.markAsLocalsDict()

    if node.isStatementBreakLoop():
        search = node.getParent()

        # Search up to the containing loop.
        while not search.isStatementLoop():
            last_search = search
            search = search.getParent()

            # Crossing a try/finally via its tried block makes the break
            # exception driven.
            if search.isStatementTryFinally() and \
               last_search == search.getBlockTry():
                search.markAsExceptionBreak()
                node.markAsExceptionDriven()

        if node.isExceptionDriven():
            search.markAsExceptionBreak()

    if node.isStatementContinueLoop():
        search = node.getParent()

        # Search up to the containing loop.
        while not search.isStatementLoop():
            last_search = search
            search = search.getParent()

            if search.isStatementTryFinally() and \
               last_search == search.getBlockTry():
                search.markAsExceptionContinue()
                node.markAsExceptionDriven()

        if node.isExceptionDriven():
            search.markAsExceptionContinue()

    if node.isExpressionYield() or node.isExpressionYieldFrom():
        search = node.getParent()

        # A yield inside a tried block (Python3) or an except handler must
        # preserve the active exception across suspension.
        while not search.isExpressionFunctionBody():
            last_search = search
            search = search.getParent()

            if Utils.python_version >= 300 and \
               search.isStatementTryFinally() and \
               last_search == search.getBlockTry():
                node.markAsExceptionPreserving()
                break

            if search.isStatementExceptHandler():
                node.markAsExceptionPreserving()
                break

    if node.isStatementReturn() or node.isStatementGeneratorReturn():
        search = node.getParent()

        exception_driven = False
        last_found = None

        # Search up to the containing function, and check for a try/finally
        # containing the "return" statement.
        while not search.isExpressionFunctionBody():
            last_search = search
            search = search.getParent()

            if search.isStatementTryFinally() and \
               last_search == search.getBlockTry():
                search.markAsExceptionReturnValueCatch()

                exception_driven = True

                # Inner try/finally blocks must re-raise the carried return
                # value for outer ones to catch it again.
                if last_found is not None:
                    last_found.markAsExceptionReturnValueReraise()

                last_found = search

        if exception_driven:
            search.markAsExceptionReturnValue()

        node.setExceptionDriven(exception_driven)

    if node.isStatementRaiseException() and node.isReraiseException():
        search = node.getParent()

        # Check if it's in a try/except block.
        while not search.isParentVariableProvider():
            if search.isStatementsSequence():
                if search.getParent().isStatementExceptHandler():
                    node.markAsReraiseLocal()
                    break

                if search.getParent().isStatementTryFinally() and \
                   Utils.python_version >= 300:
                    node.markAsReraiseFinally()

            search = search.getParent()

        search = node.getParent()

    if node.isStatementDelVariable():
        variable = node.getTargetVariableRef().getVariable()

        # Follow reference chains to the actual variable.
        while variable.isReference():
            variable = variable.getReferenced()

        variable.setHasDelIndicator()

    if node.isStatementTryExcept():
        provider = node.getParentVariableProvider()
        provider.markAsTryExceptContaining()

        if not node.isStatementTryExceptOptimized():
            parent_frame = node.getParentStatementsFrame()
            parent_frame.markAsFrameExceptionPreserving()

    if node.isStatementTryFinally():
        provider = node.getParentVariableProvider()
        provider.markAsTryFinallyContaining()

        if Utils.python_version >= 300:
            parent_frame = node.getParentStatementsFrame()
            parent_frame.markAsFrameExceptionPreserving()

    if node.isStatementRaiseException():
        provider = node.getParentVariableProvider()
        provider.markAsRaiseContaining()

    if node.isExpressionBuiltinImport() and \
       not Options.getShallFollowExtra() and \
       not isWhileListedImport(node):
        warning("""Unresolved '__import__' call at '%s' may require use \
of '--recurse-directory'.""" % (node.getSourceReference().getAsString()))

    if node.isExpressionFunctionCreation():
        # Functions used only as immediate call targets need no creation.
        if not node.getParent().isExpressionFunctionCall():
            node.getFunctionRef().getFunctionBody().markAsNeedsCreation()

    if node.isExpressionFunctionCall():
        node.getFunction().getFunctionRef().getFunctionBody().\
          markAsDirectlyCalled()

    if node.isExpressionFunctionRef():
        parent_module = node.getFunctionBody().getParentModule()

        if node.getParentModule() is not parent_module:
            node.getFunctionBody().markAsCrossModuleUsed()
def _removeDuplicateDlls(used_dlls):
    """Resolve colliding DLL basenames in "used_dlls", mutating it in place.

    Identical files are deduplicated; on Win32, differing files with the
    same basename are resolved by file version (keeping the newest);
    otherwise the duplicate is dropped with a warning.

    Returns:
        Dict mapping kept DLL paths to the list of identical paths removed,
        needed for DLL path resolution on macOS.
    """
    # Many things to consider, pylint: disable=too-many-branches
    removed_dlls = set()
    warned_about = set()

    # Identical DLLs are interesting for DLL resolution on macOS at least.
    duplicate_dlls = {}

    # Fist make checks and remove some, in loops we copy the items so we can remove
    # the used_dll list freely.
    # NOTE(review): the inner loop re-uses the name "_package_name1" for the
    # second entry's package, shadowing the outer one — harmless since both
    # are unused, but worth renaming; confirm before touching.
    for dll_filename1, (_package_name1, sources1) in tuple(iterItems(used_dlls)):
        if dll_filename1 in removed_dlls:
            continue

        for dll_filename2, (_package_name1, sources2) in tuple(iterItems(used_dlls)):
            if dll_filename1 == dll_filename2:
                continue

            if dll_filename2 in removed_dlls:
                continue

            # Colliding basenames are an issue to us.
            if os.path.basename(dll_filename1) != os.path.basename(
                    dll_filename2):
                continue

            # May already have been removed earlier
            if dll_filename1 not in used_dlls:
                continue

            if dll_filename2 not in used_dlls:
                continue

            dll_name = os.path.basename(dll_filename1)

            if Options.isShowInclusion():
                inclusion_logger.info(
                    """Colliding DLL names for %s, checking identity of \
'%s' <-> '%s'.""" % (dll_name, dll_filename1, dll_filename2))

            # Check that if a DLL has the same name, if it's identical, then it's easy.
            if haveSameFileContents(dll_filename1, dll_filename2):
                del used_dlls[dll_filename2]
                removed_dlls.add(dll_filename2)

                # Record the identity both ways for later path fixups.
                duplicate_dlls.setdefault(dll_filename1,
                                          []).append(dll_filename2)
                duplicate_dlls.setdefault(dll_filename2,
                                          []).append(dll_filename1)

                continue

            # For Win32 we can check out file versions.
            if Utils.isWin32Windows():
                dll_version1 = getWindowsDLLVersion(dll_filename1)
                dll_version2 = getWindowsDLLVersion(dll_filename2)

                if dll_version2 < dll_version1:
                    del used_dlls[dll_filename2]
                    removed_dlls.add(dll_filename2)

                    solved = True
                elif dll_version1 < dll_version2:
                    del used_dlls[dll_filename1]
                    removed_dlls.add(dll_filename1)

                    solved = True
                else:
                    solved = False

                if solved:
                    # Warn once per DLL name, except for well known MS
                    # runtime DLLs.
                    if dll_name not in warned_about and dll_name not in ms_runtime_dlls:
                        warned_about.add(dll_name)

                        inclusion_logger.warning(
                            "Conflicting DLLs for '%s' in your installation, newest file version used, hoping for the best."
                            % dll_name)

                    continue

            # So we have conflicting DLLs, in which case we do report the fact.
            inclusion_logger.warning("""\
Ignoring non-identical DLLs for '%s'.
%s used by:
   %s
different from
%s used by
   %s""" % (
                dll_name,
                dll_filename1,
                "\n   ".join(sources1),
                dll_filename2,
                "\n   ".join(sources2),
            ))

            del used_dlls[dll_filename2]
            removed_dlls.add(dll_filename2)

    return duplicate_dlls
from nuitka.__past__ import iter_modules
from nuitka.containers.oset import OrderedSet
from nuitka.importing import StandardLibrary
from nuitka.plugins.Plugins import Plugins
from nuitka.PythonVersions import python_version
from nuitka.Tracing import my_print, recursion_logger
from nuitka.utils.AppDirs import getCacheDir
from nuitka.utils.FileOperations import listDir, removeDirectory
from nuitka.utils.Importing import getSharedLibrarySuffixes
from nuitka.utils.ModuleNames import ModuleName
from nuitka.utils.Utils import getOS, isMacOS

from .IgnoreListing import isIgnoreListedNotExistingModule
from .PreloadedPackages import getPreloadedPackagePath, isPreloadedPackagePath

# Cached option flag, whether to explain module finding decisions.
_debug_module_finding = Options.shallExplainImports()

# Module names already complained about, to avoid repeated warnings.
warned_about = set()

# Directory where the main script lives. Should attempt to import from there.
main_path = None


def setMainScriptDirectory(main_dir):
    """ Initialize the main script directory.

        We use this as part of the search path for modules.
    """
    # We need to set this from the outside, pylint: disable=global-statement

    global main_path
def isRelevant(cls):
    """Report whether this plugin applies: Windows executables only."""
    # Same evaluation order as before: OS check first, then module mode.
    if Utils.getOS() != "Windows":
        return False

    return not Options.shallMakeModule()
def _findModuleInPath2(package_name, module_name, search_path):
    """ This is out own module finding low level implementation.

        Just the full module name and search path are given. This is then
        tasked to raise "ImportError" or return a path if it finds it, or
        None, if it is a built-in.
    """
    # We have many branches here, because there are a lot of cases to try.
    # pylint: disable=too-many-branches,too-many-locals

    # We may have to decide between package and module, therefore build
    # a list of candidates.
    candidates = OrderedSet()

    considered = set()

    # Higher values are lower priority.
    priority_map = {
        imp.PY_COMPILED: 3,
        imp.PY_SOURCE: 0 if Options.shallPreferSourcecodeOverExtensionModules() else 2,
        imp.C_EXTENSION: 1,
    }

    for count, entry in enumerate(search_path):
        # Don't try again, just with an entry of different casing or complete
        # duplicate.
        if os.path.normcase(entry) in considered:
            continue
        considered.add(os.path.normcase(entry))

        package_directory = os.path.join(entry, module_name.asPath())

        # First, check for a package with an init file, that would be the
        # first choice.
        if os.path.isdir(package_directory):
            found = False

            for suffix, _mode, module_type in imp.get_suffixes():
                if module_type == imp.C_EXTENSION:
                    continue

                package_file_name = "__init__" + suffix

                file_path = os.path.join(package_directory, package_file_name)

                if os.path.isfile(file_path):
                    candidates.add(
                        ImportScanFinding(
                            found_in=entry,
                            priority=priority_map[module_type],
                            full_path=package_directory,
                            search_order=count,
                        )
                    )
                    found = True

            # Python3 namespace packages have no init file; give them very
            # low priority, behind everything from the regular search path.
            if not found and python_version >= 0x300:
                candidates.add(
                    ImportScanFinding(
                        found_in=entry,
                        priority=10,
                        full_path=package_directory,
                        search_order=count + len(search_path),
                    )
                )

        # Then, check out suffixes of all kinds, but only for one directory.
        last_module_type = 0
        for suffix, _mode, module_type in imp.get_suffixes():
            # Use first match per kind only.
            if module_type == last_module_type:
                continue

            full_path = os.path.join(entry, module_name + suffix)

            if os.path.isfile(full_path):
                candidates.add(
                    ImportScanFinding(
                        found_in=entry,
                        priority=4 + priority_map[module_type],
                        full_path=full_path,
                        search_order=count,
                    )
                )
                last_module_type = module_type

    if _debug_module_finding:
        my_print("Candidates:", candidates)

    if candidates:
        # Sort by priority, with entries from same path element coming first, then desired type.
        candidates = sorted(candidates, key=lambda c: (c.search_order, c.priority))

        # On case sensitive systems, no resolution needed.
        if case_sensitive:
            _reportCandidates(
                package_name=package_name,
                module_name=module_name,
                candidate=candidates[0],
                candidates=candidates,
            )

            return candidates[0].full_path
        else:
            # Case insensitive file system: verify the exact spelling exists
            # by listing the containing directory.
            for candidate in candidates:
                for fullname, _filename in listDir(candidate[0]):
                    if fullname == candidate.full_path:
                        _reportCandidates(
                            package_name=package_name,
                            module_name=module_name,
                            candidate=candidate,
                            candidates=candidates,
                        )

                        return candidate.full_path

            # Only exact case matches matter, all candidates were ignored,
            # lets just fall through to raising the import error.

    # Nothing found.
    raise ImportError
def buildParseTree(provider, source_code, source_ref, is_module, is_main):
    """Parse source code and turn it into the Nuitka node tree.

    For modules this also injects assignments of the module default
    variables ("__doc__", "__file__", "__path__", etc.) which vary
    with the Python version.
    """
    # There are a bunch of branches here, mostly to deal with version
    # differences for module default variables. pylint: disable=too-many-branches

    pushFutureSpec()
    if is_module:
        provider.future_spec = getFutureSpec()

    ast_body = parseSourceCodeToAst(
        source_code=source_code,
        filename=source_ref.getFilename(),
        line_offset=source_ref.getLineNumber() - 1,
    )

    ast_body, doc_value = extractDocFromBody(ast_body)

    if is_module and is_main and python_version >= 360:
        provider.markAsNeedsAnnotationsDictionary()

    body_node = buildStatementsNode(
        provider=provider, nodes=ast_body, source_ref=source_ref
    )

    # Future statements are only legal at the very start of a module.
    checkFutureImportsOnlyAtStart(ast_body)

    internal_source_ref = source_ref.atInternal()

    module_statements = []

    if is_module:
        # Add import of "site" module of main programs visibly in the node tree,
        # so recursion and optimization can pick it up, checking its effects.
        if is_main and "no_site" not in Options.getPythonFlags():
            for path_imported_name in getPthImportedPackages():
                module_statements.append(
                    StatementExpressionOnly(
                        expression=makeAbsoluteImportNode(
                            module_name=path_imported_name,
                            source_ref=source_ref,
                        ),
                        source_ref=source_ref,
                    )
                )

            module_statements.append(
                StatementExpressionOnly(
                    expression=makeAbsoluteImportNode(
                        module_name="site",
                        source_ref=source_ref,
                    ),
                    source_ref=source_ref,
                )
            )

        module_statements.append(
            StatementAssignmentVariableName(
                provider=provider,
                variable_name="__doc__",
                source=makeConstantRefNode(
                    constant=doc_value,
                    source_ref=internal_source_ref,
                    user_provided=True,
                ),
                source_ref=internal_source_ref,
            )
        )

        module_statements.append(
            StatementAssignmentVariableName(
                provider=provider,
                variable_name="__file__",
                source=ExpressionModuleAttributeFileRef(
                    variable=provider.getVariableForReference("__file__"),
                    source_ref=internal_source_ref,
                ),
                source_ref=internal_source_ref,
            )
        )

        if provider.isCompiledPythonPackage():
            # This assigns "__path__" value.
            module_statements.append(
                createPathAssignment(provider, internal_source_ref)
            )

    if python_version >= 300:
        module_statements.append(
            StatementAssignmentVariableName(
                provider=provider,
                variable_name="__cached__",
                source=ExpressionConstantNoneRef(
                    source_ref=internal_source_ref, user_provided=True
                ),
                source_ref=internal_source_ref,
            )
        )

    needs_initializing = (
        not provider.isMainModule() and 300 <= python_version < 340
    )

    if needs_initializing:
        # Set "__initializing__" at the beginning to True
        module_statements.append(
            StatementAssignmentVariableName(
                provider=provider,
                variable_name="__initializing__",
                source=makeConstantRefNode(
                    constant=True,
                    source_ref=internal_source_ref,
                    user_provided=True,
                ),
                source_ref=internal_source_ref,
            )
        )

    if provider.needsAnnotationsDictionary():
        # Set "__annotations__" on module level to {}
        module_statements.append(
            StatementAssignmentVariableName(
                provider=provider,
                variable_name="__annotations__",
                source=makeConstantRefNode(
                    constant={},
                    source_ref=internal_source_ref,
                    user_provided=True,
                ),
                source_ref=internal_source_ref,
            )
        )

    # Now the module body if there is any at all.
    if body_node is not None:
        module_statements.extend(body_node.getStatements())

    if needs_initializing:
        # Set "__initializing__" at the end to False
        module_statements.append(
            StatementAssignmentVariableName(
                provider=provider,
                variable_name="__initializing__",
                source=makeConstantRefNode(
                    constant=False,
                    source_ref=internal_source_ref,
                    user_provided=True,
                ),
                source_ref=internal_source_ref,
            )
        )

    if is_module:
        frame = makeModuleFrame(
            module=provider,
            statements=module_statements,
            source_ref=source_ref,
        )

        popFutureSpec()

        return frame
    else:
        assert False
def main():
    """Command line entry point displaying DLL information of given files.

    For each positional argument (a DLL path), prints version information,
    direct PE dependencies, SXS manifest data, and recursive dependencies
    found via pefile (only with "--pefile") and via Dependency Walker.
    """
    parser = OptionParser()

    parser.add_option(
        "--verbose",
        action="store_true",
        dest="verbose",
        default=False,
        help="""\
Be verbose in output. Default is %default.""",
    )

    parser.add_option(
        "--pefile",
        action="store_true",
        dest="pefile",
        default=False,
        help="""\
Use pefile dependencies. Default is %default.""",
    )

    options, positional_args = parser.parse_args()

    if not positional_args:
        sys.exit("No DLLs given.")

    for filename in positional_args:
        print("Filename:", filename)
        print("Version Information:", getWindowsDLLVersion(filename))

        # Fixed output typo: was "dependended".
        print("DLLs directly depended (pefile):", getPEFileInformation(filename))

        print("SXS information (manifests):")

        sxs = getSxsFromDLL(filename=filename, with_data=True)
        if sxs:
            print(sxs)

        # Fixed: only announce the pefile based recursive scan when it is
        # actually going to be performed.
        if options.pefile:
            print("DLLs recursively depended (pefile):")

            with TimerReport(
                "Finding dependencies for %s took %%.2f seconds" % filename
            ):
                from nuitka import Options

                # The pefile scan is behind an experimental flag, toggle it
                # only around the actual detection.
                Options.enableExperimental("use_pefile")

                r = detectBinaryPathDLLsWindowsPE(
                    is_main_executable=False,
                    source_dir="notexist",
                    original_dir=os.path.dirname(filename),
                    binary_filename=filename,
                    package_name=None,
                    use_cache=False,
                    update_cache=True,
                )

                Options.disableExperimental("use_pefile")

            for dll_filename in sorted(r):
                print(" ", dll_filename)
            print("Total: %d" % len(r))

        print("DLLs recursively depended (depends.exe):")

        with TimerReport("Finding dependencies for %s took %%.2f seconds" % filename):
            r = detectBinaryPathDLLsWindowsDependencyWalker(
                is_main_executable=False,
                source_dir="notexist",
                original_dir=os.path.dirname(filename),
                binary_filename=filename,
                package_name=None,
                use_cache=False,
                update_cache=False,
            )

        for dll_filename in sorted(r):
            print(" ", dll_filename)
        print("Total: %d" % len(r))
def createPlistInfoFile(logger, onefile):
    """Create the "Info.plist" file of a macOS application bundle.

    Collects application name, version and an optional icon from the
    Options module, converts the icon to ".icns" format if needed, and
    writes the property list into the bundle directory.
    """
    # Many details, pylint: disable=too-many-locals
    import plistlib

    # The bundle directory location depends on standalone mode.
    if Options.isStandaloneMode():
        bundle_directory = os.path.dirname(
            OutputDirectories.getStandaloneDirectoryPath()
        )
    else:
        bundle_directory = os.path.dirname(
            OutputDirectories.getResultRunFilename(onefile=onefile)
        )

    result_filename = OutputDirectories.getResultFullpath(onefile=onefile)

    # Fall back to binary basename resp. "1.0" where the user gave nothing.
    app_name = Options.getMacOSAppName() or os.path.basename(result_filename)
    signed_app_name = Options.getMacOSSignedAppName() or app_name
    app_version = Options.getMacOSAppVersion() or "1.0"

    # Using OrderedDict keeps the plist output order stable.
    plist_values = OrderedDict(
        [
            ("CFBundleDisplayName", app_name),
            ("CFBundleName", app_name),
            ("CFBundleIdentifier", signed_app_name),
            ("CFBundleExecutable", app_name),
            ("CFBundleInfoDictionaryVersion", "6.0"),
            ("CFBundlePackageType", "APPL"),
            ("CFBundleShortVersionString", app_version),
        ]
    )

    icon_paths = Options.getIconPaths()

    if icon_paths:
        # Only a single icon is supported for macOS bundles.
        assert len(icon_paths) == 1
        icon_path = icon_paths[0]

        # Convert to single macOS .icns file if necessary
        if not icon_path.endswith(".icns"):
            logger.info(
                "File '%s' is not in macOS icon format, converting to it." % icon_path
            )

            icon_build_path = os.path.join(
                OutputDirectories.getSourceDirectoryPath(onefile=onefile),
                "icons",
            )
            makePath(icon_build_path)

            converted_icon_path = os.path.join(
                icon_build_path,
                "Icons.icns",
            )

            convertImageToIconFormat(
                logger=logger,
                image_filename=icon_path,
                icon_filename=converted_icon_path,
            )

            icon_path = converted_icon_path

        icon_name = os.path.basename(icon_path)

        resources_dir = os.path.join(bundle_directory, "Resources")
        makePath(resources_dir)

        copyFile(icon_path, os.path.join(resources_dir, icon_name))

        plist_values["CFBundleIconFile"] = icon_name

    # Console mode, which is why we have to use bundle in the first place typically.
    if Options.shallDisableConsoleWindow():
        plist_values["NSHighResolutionCapable"] = True
    else:
        plist_values["LSBackgroundOnly"] = True

    plist_filename = os.path.join(bundle_directory, "Info.plist")

    # Python2 plistlib has a different serialization API.
    if str is bytes:
        plist_contents = plistlib.writePlistToString(plist_values)
    else:
        plist_contents = plistlib.dumps(plist_values)

    with openTextFile(filename=plist_filename, mode="wb") as plist_file:
        plist_file.write(plist_contents)
def optimizeCompiledPythonModule(module):
    """Optimize a single module until no more local change tags occur.

    Repeatedly recomputes the module, watching the change tag set, and
    returns True when anything was changed at all, so the caller knows
    another global pass may be worthwhile.
    """
    optimization_logger.info_fileoutput(
        "Doing module local optimizations for '{module_name}'.".format(
            module_name=module.getFullName()
        ),
        other_logger=progress_logger,
    )

    touched = False

    if _progress and Options.isShowMemory():
        memory_watch = MemoryWatch()

    # Temporary workaround, since we do some optimization based on the last pass
    # results that are then not yet fully seen in the traces yet until another
    # time around, we allow to continue the loop even without changes one more
    # time.
    unchanged_count = 0

    while True:
        tag_set.clear()

        try:
            # print("Compute module")
            with withChangeIndicationsTo(signalChange):
                scopes_were_incomplete = module.computeModule()
        except BaseException:
            # Make sure interruptions report which module was being worked on.
            general.info("Interrupted while working on '%s'." % module)
            raise

        if scopes_were_incomplete:
            tag_set.add("var_usage")

        Graphs.onModuleOptimizationStep(module)

        # Ignore other modules brought into the game.
        if "new_code" in tag_set:
            tag_set.remove("new_code")

        # Search for local change tags.
        if not tag_set:
            unchanged_count += 1

            # One extra iteration on the first pass, see workaround note above.
            if unchanged_count == 1 and pass_count == 1:
                # Fixed log message grammar, was "No changed, ...".
                optimization_logger.info_fileoutput(
                    "No change, but retrying one more time.",
                    other_logger=progress_logger,
                )
                continue

            optimization_logger.info_fileoutput(
                "Finished with the module.", other_logger=progress_logger
            )
            break

        unchanged_count = 0

        optimization_logger.info_fileoutput(
            "Not finished with the module due to following change kinds: %s"
            % ",".join(sorted(tag_set)),
            other_logger=progress_logger,
        )

        # Otherwise we did stuff, so note that for return value.
        touched = True

    if _progress and Options.isShowMemory():
        memory_watch.finish()

        memory_logger.info(
            "Memory usage changed during optimization of '%s': %s"
            % (module.getFullName(), memory_watch.asStr())
        )

    Plugins.considerImplicitImports(module=module, signal_change=signalChange)

    return touched