def getTests(self):
    """
    Get the test objects contained within this module.
    """
    # If there are no cached results, gather the sub-tests based on
    # the type of module.
    if not self.tests:
        # Get the test function(s) defined in this module
        for name in dir(self.module):
            if name == 'Test': #name.startswith('Test'):
                obj = getattr(self.module, name)
                if callable(obj):
                    self.tests.append(TestFunction.TestFunction(obj))
        # If this is a package, get the available modules
        if self.isPackage:
            files = []
            dirs = []
            path = ImportUtil.GetSearchPath(self.path)
            for importer, name, ispkg in ImportUtil.IterModules(path):
                if ispkg:
                    dirs.append(name)
                else:
                    files.append(name)
            # Default running order is alphabetical
            dirs.sort()
            files.sort()
            # Let the module manipulate the test lists
            if hasattr(self.module, 'PreprocessFiles'):
                (dirs, files) = self.module.PreprocessFiles(dirs, files)
            # Add the test lists to our available tests
            for name in dirs + files:
                self.addTest(name)
        # If this module defines a CoverageModule, add the coverage
        # start and end functions.
        if hasattr(self.module, 'CoverageModule'):
            ignored = None
            if hasattr(self.module, 'CoverageIgnored'):
                ignored = self.module.CoverageIgnored
            ct = TestCoverage.TestCoverage(self.module.CoverageModule, ignored)
            self.tests.insert(0, TestFunction.TestFunction(ct._start))
            self.tests.append(TestFunction.TestFunction(ct._end))
    return self.tests
def _scan_path(self, path):
    if DEBUG:
        print "PackageManager._scan_path(): scanning", path
    if path.endswith('.egg'):
        pathname = os.path.join(path, 'EGG-INFO', 'PKG-INFO')
        if DEBUG:
            print " loading", pathname
        if os.path.isdir(path):
            # .egg directory
            yield Dist.DistributionMetadata.from_filename(pathname)
        else:
            # .egg zipfile
            importer = ImportUtil.GetImporter(path)
            if importer is not None:
                data = importer.get_data(pathname)
                yield Dist.DistributionMetadata.from_string(data)
    elif os.path.isdir(path):
        # scan for .egg-info entries
        for name in os.listdir(path):
            if name.endswith('.egg-info'):
                pathname = os.path.join(path, name)
                # .egg-info directories contain 'PKG-INFO' files,
                # .egg-info files are the 'PKG-INFO' file.
                if os.path.isdir(pathname):
                    pathname = os.path.join(pathname, 'PKG-INFO')
                if DEBUG:
                    print " loading", pathname
                yield Dist.DistributionMetadata.from_filename(pathname)
    return
def finalize_options(self):
    self.set_undefined_options('build_docs',
                               ('build_dir', 'build_dir'))
    self.set_undefined_options('install',
                               ('install_docs', 'install_dir'),
                               ('skip_build', 'skip_build'),
                               ('force', 'force'))
    self.documents = self.get_documents()

    resourcebundle = GetConfigVar('RESOURCEBUNDLE')
    if resourcebundle is None:
        # building 4Suite itself; use different (hard-wired) directories
        base_uri = Uri.OsPathToUri(os.path.join('Ft', 'Data'))
    else:
        datadir = GetConfigVar('DATADIR')
        datadir = os.path.join(datadir, 'Data', 'Stylesheets')
        if resourcebundle:
            resource = ImportUtil.OsPathToResource(datadir)
            base_uri = Uri.ResourceToUri('Ft.Lib', resource)
        else:
            base_uri = Uri.OsPathToUri(datadir)

    defaults = self.get_default_stylesheets()
    for name in defaults:
        attr = name + '_xslt'
        value = getattr(self, attr)
        if value is None:
            value = base_uri + '/' + defaults[name]
        else:
            pathname = util.convert_path(value)
            value = Uri.OsPathToUri(pathname)
        setattr(self, attr, value)

    self._xslt_processor = None
    self._stylesheets = {}
    return
def GetDefaultCatalog(basename='default.cat'):
    """
    Load the default catalog file(s).
    """
    quiet = 'XML_DEBUG_CATALOG' not in os.environ
    uris = []
    # original 4Suite XML Catalog support
    if 'XML_CATALOGS' in os.environ:
        # os.pathsep-separated list of pathnames
        for path in os.environ['XML_CATALOGS'].split(os.pathsep):
            uris.append(Uri.OsPathToUri(path))
    # libxml2 XML Catalog support
    if 'XML_CATALOG_FILES' in os.environ:
        # whitespace-separated list of pathnames or URLs (ick!)
        for path in os.environ['XML_CATALOG_FILES'].split():
            # if it is not already an absolute URI, make it one
            if not Uri.IsAbsolute(path):
                uris.append(Uri.OsPathToUri(path))
            else:
                uris.append(path)
    # add the default 4Suite catalog
    pathname = os.path.join(GetConfigVar('DATADIR'), basename)
    if GetConfigVar('RESOURCEBUNDLE'):
        resource = ImportUtil.OsPathToResource(pathname)
        uri = Uri.ResourceToUri('Ft.Xml', resource)
    else:
        uri = Uri.OsPathToUri(pathname)
    uris.append(uri)
    if not quiet:
        prefix = "Catalog URIs:"
        for uri in uris:
            sys.stderr.write('%s %s\n' % (prefix, uri))
            prefix = " "*len(prefix)
    catalog = None
    for uri in uris:
        if not quiet:
            sys.stderr.write('Reading %s\n' % uri)
            sys.stderr.flush()
        try:
            # FIXME: Use dict merging rather than this inefficient cascading
            if catalog is None:
                if not quiet:
                    sys.stderr.write('Creating catalog from %s\n' % uri)
                    sys.stderr.flush()
                catalog = Catalog(uri, quiet)
            else:
                if not quiet:
                    sys.stderr.write('Appending %s\n' % uri)
                    sys.stderr.flush()
                catalog.catalogs.append(Catalog(uri, quiet))
        except UriException, e:
            # Skip catalog resources that cannot be read.
            #warnings.warn("Catalog resource (%s) disabled: %s"
            #              % (uri, e.message), FtWarning)
            pass
    return catalog
def document(self, category, name, sources, object, formatter):
    xmlfile = self.get_output_filename(name, category)
    self.mkpath(os.path.dirname(xmlfile))
    # The dependencies for 'object' are the source for the formatter
    # and, of course, 'sources'.
    formatter_module = formatter.__class__.__module__
    source_mtime = max(ImportUtil.GetLastModified(formatter_module),
                       *map(os.path.getmtime, sources))
    try:
        target_mtime = os.path.getmtime(xmlfile)
    except OSError:
        target_mtime = -1
    if self.force or source_mtime > target_mtime:
        self.announce("documenting %s -> %s" % (name, xmlfile), 2)
        if not self.dry_run:
            try:
                stream = open(xmlfile, 'w')
                try:
                    formatter.format(object, stream, encoding='iso-8859-1')
                finally:
                    stream.close()
            except (KeyboardInterrupt, SystemExit):
                os.remove(xmlfile)
                raise
            except Exception, exc:
                os.remove(xmlfile)
                if DEBUG:
                    raise
                raise DistutilsExecError("could not document %s (%s)"
                                         % (name, exc))
def __init__(self, name, module, addModes, skipModes, allModes):
    TestLoader.TestLoader.__init__(self, name, module.__name__,
                                   addModes, skipModes, allModes)
    self.module = module
    self.modes = self.getModes(addModes, skipModes, allModes)
    loader = ImportUtil.FindLoader(self.path)
    self.isPackage = loader.is_package(self.path)
    return
def GetModuleIncludes(modules):
    use_resources = GetConfigVar('RESOURCEBUNDLE')
    source_vars = {}
    for config_name, var_name in InstallConfig.CONFIG_MAPPING.items():
        source_vars[var_name] = GetConfigVar(config_name.upper())
    target_vars = Install.GetBundleScheme()
    includes = []
    for module in DATA_FILES:
        if module in modules:
            for filespec in DATA_FILES[module]:
                source = subst_vars(convert_path(filespec), source_vars)
                if use_resources:
                    resource = ImportUtil.OsPathToResource(source)
                    source = ImportUtil.GetResourceFilename(module, resource)
                target = subst_vars(convert_path(filespec), target_vars)[1:]
                includes.append((source, target))
    return includes
def _getFunctionList(self):
    modules = []
    deferred = []
    for importer, name, ispkg in ImportUtil.IterModules(self.path):
        fullname = self.module + '.' + name
        try:
            __import__(fullname)
        except ImportError:
            deferred.append(fullname)
        else:
            modules.append(sys.modules[fullname])
    while deferred:
        changed = False
        for fullname in tuple(deferred):
            try:
                __import__(fullname)
            except ImportError:
                pass
            else:
                changed = True
                modules.append(sys.modules[fullname])
                deferred.remove(fullname)
        if not changed:
            # no more modules able to be loaded
            raise ValueError(",".join(deferred))
    data = {}
    for module in modules:
        for value in vars(module).values():
            if value in self.ignored:
                continue
            if inspect.isfunction(value):
                data[value.func_code] = value
            elif inspect.isclass(value):
                # Get the methods defined on this class
                methods = inspect.getmembers(value, inspect.ismethod)
                for name, method in methods:
                    if method in self.ignored or \
                       name in ('__str__', '__repr__', 'pprint'):
                        continue
                    # Make sure this method is not a C function
                    if inspect.isfunction(method.im_func):
                        data[method.im_func.func_code] = method
    # Only watch the objects that are defined directly in this package
    self.data = {}
    for code, object in data.items():
        if os.path.dirname(code.co_filename) in self.path:
            self.data[code] = object
    return
def newer(self, description, filename):
    """
    Return true if 'filename' needs to be regenerated, i.e., if it is
    missing or older than the package file or this command module.
    """
    try:
        target_mtime = os.stat(filename).st_mtime
    except OSError:
        return True
    if self.distribution.package_file:
        source_mtime = os.stat(self.distribution.package_file).st_mtime
        if source_mtime > target_mtime:
            return True
    command_mtime = ImportUtil.GetLastModified(__name__)
    if command_mtime > target_mtime:
        return True
    self.announce("skipping %s (up-to-date)" % description, 1)
    return False
def __init__(self, moduleName, ignored=None):
    self.module = moduleName
    self.path = ImportUtil.GetSearchPath(moduleName)
    self.ignored = ignored or []
    self.data = {}
    return
def build_extension(self, ext):
    # First, scan the sources for SWIG definition files (.i), run
    # SWIG on 'em to create .c files, and modify the sources list
    # accordingly.
    sources = self.prepare_sources(ext)

    fullname = self.get_ext_fullname(ext.name)
    ext_filename = os.path.join(self.build_lib,
                                self.get_ext_filename(fullname))

    # Changes to the command indicate that compilation options may have
    # changed, so rebuild/link everything.
    command_mtime = ImportUtil.GetLastModified(__name__)
    try:
        force = command_mtime > os.stat(ext_filename).st_mtime
    except OSError:
        force = True
    force = self.force or force

    depends = sources + ext.depends
    for includes in ext.includes.values():
        depends.extend(includes)
    if not (force or newer_group(depends, ext_filename, 'newer')):
        self.announce("skipping '%s' extension (up-to-date)" % ext.name)
        return
    self.announce("building '%s' extension" % ext.name, 2)

    # Next, compile the source code to object files.
    extra_args = ext.extra_compile_args or []
    macros = ext.define_macros[:]
    for undef in ext.undef_macros:
        macros.append((undef,))

    # Get the resulting object filenames as we are compiling the sources
    # one at a time to reduce compile time for large source lists.
    objects = self.compiler.object_filenames(sources,
                                             sysconfig.python_build,
                                             self.build_temp)
    self.compiler.force = force
    if sys.version >= '2.3':
        # Python 2.3 added dependency checking to the compiler; use that.
        for object, source in zip(objects, sources):
            depends = ext.depends + ext.includes[source]
            self.compiler.compile([source], output_dir=self.build_temp,
                                  macros=macros,
                                  include_dirs=ext.include_dirs,
                                  debug=self.debug,
                                  extra_postargs=extra_args,
                                  depends=depends)
    else:
        if not force:
            # Determine those sources that require rebuilding
            new_sources = []
            for object, source in zip(objects, sources):
                depends = [source]
                depends.extend(ext.includes[source])
                if (newer_group(depends, object, 'newer') or
                    command_mtime > os.stat(object).st_mtime):
                    new_sources.append(source)
            sources = new_sources
        # Forcibly build those sources listed in 'sources'
        self.compiler.force = True
        for source in sources:
            output_dir = os.path.join(self.build_temp,
                                      os.path.dirname(source))
            self.compiler.compile([source], output_dir=output_dir,
                                  macros=macros,
                                  include_dirs=ext.include_dirs,
                                  debug=self.debug,
                                  extra_postargs=extra_args)

    # Now link the object files together into a "shared object" --
    # of course, first we have to figure out all the other things
    # that go into the mix.
    if ext.extra_objects:
        objects.extend(ext.extra_objects)

    # Set up "symbol stripping"
    if self.symbol_stripping == STRIP_VERSIONING:
        # Strip symbols via a versioning script
        f, mapfile = self._mkstemp(ext, '.map')
        f.write('{ global: ')
        for sym in self.get_export_symbols(ext):
            f.write(sym + '; ')
        f.write('local: *; };')
        f.close()
        link_preargs = [self.strip_command % mapfile]
    elif self.symbol_stripping == STRIP_EXPORTS_FILE:
        # Strip symbols via an exports file
        f, expfile = self._mkstemp(ext, '.exp')
        for sym in self.get_export_symbols(ext):
            f.write(sym + '\n')
        f.close()
        link_preargs = [self.strip_command % expfile]
    elif self.symbol_stripping == STRIP_EXPORTS_ARGLIST:
        # Strip symbols via multiple arguments
        symbols = self.get_export_symbols(ext)
        link_preargs = [ self.strip_command % sym for sym in symbols ]
    else:
        # No linker support for limiting exported symbols
        link_preargs = []

    # Detect target language, if not provided
    kwords = {}
    if sys.version >= '2.3':
        lang = ext.language or self.compiler.detect_language(ext.sources)
        kwords['target_lang'] = lang

    self.compiler.link_shared_object(
        objects, ext_filename,
        libraries=self.get_libraries(ext),
        library_dirs=ext.library_dirs,
        runtime_library_dirs=ext.runtime_library_dirs,
        extra_preargs=link_preargs,
        extra_postargs=ext.extra_link_args,
        export_symbols=self.get_export_symbols(ext),
        debug=self.debug,
        build_temp=self.build_temp,
        **kwords)

    if self.symbol_stripping == STRIP_EXPORTS_POST_LINK:
        # Create the exports file
        f, expfile = self._mkstemp(ext, '.exp')
        for sym in self.get_export_symbols(ext):
            f.write(sym + '\n')
        f.close()
        subst = {'exports' : expfile, 'extension' : ext_filename}
        self.spawn([ x % subst for x in self.strip_command.split(' ') ])

    # Reset the force flag on the compiler
    self.compiler.force = self.force
    return
def build_script(self, script):
    """
    Builds a CommandLineApp script.  On POSIX systems, this is a
    generated shell script.  For Windows, it is a compiled executable
    with the generated file appended to the end of the stub.
    """
    # Get the destination filename
    outfile = self.get_script_filename(script)

    # Determine if the script needs to be built
    command_mtime = ImportUtil.GetLastModified(__name__)
    if os.name == 'nt':
        stub_mtime = ImportUtil.GetResourceLastModified(__name__,
                                                        'stubmain.exe')
        command_mtime = max(command_mtime, stub_mtime)
    try:
        target_mtime = os.stat(outfile).st_mtime
    except OSError:
        target_mtime = -1
    if not (self.force or command_mtime > target_mtime):
        self.announce("skipping '%s' script (up-to-date)" % script.name)
        return
    else:
        self.announce("building '%s' script" % (script.name), 2)

    repl = {
        'executable': self.get_python_executable(),
        'command': self.get_command_name(),
        'timestamp': time.asctime(),
        'toplevel': script.module.split('.', 1)[0],
        }
    repl.update(vars(script))
    script_body = SHELL_SCRIPT_BODY % repl

    if self.dry_run:
        # Don't actually create the script
        pass
    elif os.name == 'nt':
        # Populate the ScriptInfo structure
        script_info = ScriptInfo()
        script_info.Signature = 0x00005446      # "FT\0\0"
        script_info.MajorPythonVersion = sys.version_info[0]
        script_info.MinorPythonVersion = sys.version_info[1]
        script_info.Subsystem = 0x0003          # CUI
        if self.debug:
            script_info.Characteristics |= 0x0001
        stub_bytes = ImportUtil.GetResourceString(__name__, 'stubmain.exe')
        script_info.ScriptAddress = len(stub_bytes)
        script_info.ScriptSize = len(script_body)
        # Write the script executable
        f = open(outfile, 'w+b')
        try:
            f.write(stub_bytes)
            f.write(script_body)
            ImageHlp.UpdateResource(f, ImageHlp.RT_RCDATA, 1, script_info)
            ImageHlp.SetSubsystem(f, ImageHlp.IMAGE_SUBSYSTEM_WINDOWS_CUI)
        finally:
            f.close()
    else:
        # Create the file with execute permissions set
        fd = os.open(outfile, os.O_WRONLY | os.O_CREAT | os.O_TRUNC, 0755)
        try:
            os.write(fd, script_body)
        finally:
            os.close(fd)
    return
def build_index(self, documents):
    from Ft.Xml.Xslt.BuiltInExtElements import RESERVED_NAMESPACE
    name = 'index-' + self.distribution.get_name()
    xmlfile = self.get_output_filename(name)
    source_mtime = max(os.path.getmtime(self.distribution.script_name),
                       os.path.getmtime(self.distribution.package_file),
                       ImportUtil.GetLastModified(__name__))
    try:
        target_mtime = os.path.getmtime(xmlfile)
    except OSError:
        target_mtime = -1
    if not (self.force or source_mtime > target_mtime):
        self.announce('not creating index (up-to-date)', 1)
        return
    else:
        self.announce("creating index -> %s" % xmlfile, 2)

    index = {}
    index_uri = Uri.OsPathToUri(xmlfile)
    xmlstr = XmlFormatter.XmlRepr().escape
    for doc in documents:
        if 'noindex' not in doc.flags:
            output = os.path.splitext(doc.source)[0] + '.html'
            source_uri = Uri.OsPathToUri(doc.source)
            output_uri = Uri.OsPathToUri(output)
            category = index.setdefault(doc.category, [])
            category.append({
                'title' : xmlstr(doc.title),
                'source' : Uri.Relativize(source_uri, index_uri),
                'output' : Uri.Relativize(output_uri, index_uri),
                'stylesheet' : xmlstr(doc.stylesheet),
                })

    sections = []
    for title, category, sort in (
        ('General', 'general', False),
        ('Modules', 'modules', True),
        ('XPath/XSLT Extensions', 'extensions', False),
        ('Command-line Applications', 'commandline', True),
        ):
        if category not in index:
            continue
        items = []
        L = index[category]
        if sort:
            L.sort(lambda a, b: cmp(a['title'], b['title']))
        for info in L:
            repl = {'title' : info['title'],
                    'url' : info['output'],
                    }
            items.append(INDEX_LISTITEM % repl)
        if items:
            # add the section if it contains any entries
            items = ''.join(items)
            repl = {'title' : xmlstr(title),
                    'category' : xmlstr(category),
                    'items' : items,
                    }
            sections.append(INDEX_SECTION % repl)
    sections = ''.join(sections)

    sources = []
    for category in index.values():
        for info in category:
            sources.append(INDEX_SOURCE % info)
    sources = ''.join(sources)

    repl = {'fullname' : xmlstr(self.distribution.get_fullname()),
            'sections' : sections,
            'namespace' : RESERVED_NAMESPACE,
            'sources' : sources,
            }
    index = INDEX_TEMPLATE % repl

    if not self.dry_run:
        f = open(xmlfile, 'wb')
        f.write(index)
        f.close()
    return documents