def typeToStr(self):
    if self.ext_type == 'instance':
        return 'Instance extension'
    if self.ext_type == 'device':
        return 'Device extension'
    if self.ext_type is not None:
        self.generator.logMsg('warn',
                              'The type attribute of ' + self.name +
                              ' extension is neither \'instance\' nor \'device\'. '
                              'That is invalid (at the time this script was written).')
        # Fall back to the raw attribute value rather than writing to an
        # undefined file handle.
        return self.ext_type.capitalize() + ' extension'
    else:
        # should be unreachable
        self.generator.logMsg('error',
                              'Logic error in typeToStr(): Missing type attribute!')
        return None
def endFile(self):
    # Print out all the dictionaries as Python strings.
    # Could just print(dict) but that's not human-readable
    dicts = [
        [self.basetypes,    'basetypes'],
        [self.consts,       'consts'],
        [self.enums,        'enums'],
        [self.flags,        'flags'],
        [self.funcpointers, 'funcpointers'],
        [self.protos,       'protos'],
        [self.structs,      'structs'],
        [self.handles,      'handles'],
        [self.defines,      'defines'],
        [self.typeCategory, 'typeCategory'],
        [self.alias,        'alias'],
    ]
    for (entry_dict, name) in dicts:
        write(name + ' = {}', file=self.outFile)
        for key in sorted(entry_dict.keys()):
            write(name + '[' + enquote(key) + '] = ', entry_dict[key],
                  file=self.outFile)

    # Dictionary containing the relationships of a type
    # (e.g. a dictionary with each related type as keys).
    write('mapDict = {}', file=self.outFile)

    # Could just print(self.mapDict), but prefer something
    # human-readable and stable-ordered
    for baseType in sorted(self.mapDict.keys()):
        write('mapDict[' + enquote(baseType) + '] = ', file=self.outFile, end='')
        pprint(self.mapDict[baseType], self.outFile)

    OutputGenerator.endFile(self)
def beginFile(self, genOpts):
    self.genOpts = genOpts
    try:
        os.mkdir(genOpts.filename)
    except FileExistsError:
        pass

    self.typesFile = open(path.join(genOpts.filename, "types.d"), "w", encoding="utf-8")
    self.funcsFile = open(path.join(genOpts.filename, "functions.d"), "w", encoding="utf-8")
    #self.testsFile = open(path.join(genOpts.filename, "test.txt"), "w", encoding="utf-8")

    with open(path.join(genOpts.filename, "package.d"), "w", encoding="utf-8") as packageFile:
        write(PACKAGE_HEADER.format(PACKAGE_PREFIX=genOpts.packagePrefix), file=packageFile)

    write(FUNCTIONS_HEADER.format(PACKAGE_PREFIX=genOpts.packagePrefix), file=self.funcsFile)
def endFile(self):
    # C-specific
    # Finish C++ wrapper and multiple inclusion protection
    self.newline()
    write('#ifdef __cplusplus', file=self.outFile)
    write('}', file=self.outFile)
    write('#endif', file=self.outFile)
    if self.genOpts.protectFile and self.genOpts.filename:
        self.newline()
        write('#endif', file=self.outFile)
    # Finish processing in superclass
    OutputGenerator.endFile(self)
def genTarget(args):
    global genOpts

    # Create generator options with specified parameters
    makeGenOpts(extensions=args.extension,
                removeExtensions=args.removeExtension,
                protect=args.protect,
                directory=args.directory)

    if args.target in genOpts.keys():
        createGenerator = genOpts[args.target][0]
        options = genOpts[args.target][1]

        if not args.quiet:
            write('* Building', options.filename, file=sys.stderr)

        startTimer(args.time)
        gen = createGenerator(errFile=errWarn, warnFile=errWarn, diagFile=diag)
        reg.setGenerator(gen)
        reg.apiGen(options)

        if not args.quiet:
            write('* Generated', options.filename, file=sys.stderr)
        endTimer(args.time, '* Time to generate ' + options.filename + ' =')
    else:
        write('No generator options for unknown target:', args.target, file=sys.stderr)
def genTarget(args):
    global genOpts

    # Create generator options with specified parameters
    makeGenOpts(extensions=args.extension,
                protect=args.protect,
                directory=args.directory)

    if args.target in genOpts.keys():
        createGenerator = genOpts[args.target][0]
        options = genOpts[args.target][1]

        write("* Building", options.filename, file=sys.stderr)

        startTimer(args.time)
        gen = createGenerator(errFile=errWarn, warnFile=errWarn,
                              diagFile=diag, registryFile=args.registry)
        reg.setGenerator(gen)
        reg.apiGen(options)

        write("* Generated", options.filename, file=sys.stderr)
        endTimer(args.time, "* Time to generate " + options.filename + " =")
    else:
        write("No generator options for unknown target:", args.target, file=sys.stderr)
def genHeaders():
    # Loop over targets, building each
    generated = 0
    for item in buildList:
        if item is None:
            break
        createGenerator = item[0]
        genOpts = item[1]
        if target and target != genOpts.filename:
            # write('*** Skipping', genOpts.filename)
            continue
        write('*** Building', genOpts.filename)
        generated = generated + 1

        startTimer()
        gen = createGenerator(errFile=errWarn, warnFile=errWarn, diagFile=diag)
        reg.setGenerator(gen)
        reg.apiGen(genOpts)
        write('** Generated', genOpts.filename)
        endTimer('Time to generate ' + genOpts.filename + ' =')

    if target and generated == 0:
        write('Failed to generate target:', target)
def writeInclude(self, directory, basename, contents):
    # Create subdirectory, if needed
    directory = self.genOpts.directory + '/' + directory
    self.makeDir(directory)

    # Create file
    filename = directory + '/' + basename + '.txt'
    self.logMsg('diag', '# Generating include file:', filename)
    fp = open(filename, 'w', encoding='utf-8')

    # Asciidoc anchor
    write(self.genOpts.conventions.warning_comment, file=fp)
    write('[[{0},{0}]]'.format(basename), file=fp)
    write('[source,c++]', file=fp)
    write('----', file=fp)
    write(contents, file=fp)
    write('----', file=fp)
    fp.close()

    if self.genOpts.secondaryInclude:
        # Create secondary no cross-reference include file
        filename = directory + '/' + basename + '.no-xref.txt'
        self.logMsg('diag', '# Generating include file:', filename)
        fp = open(filename, 'w', encoding='utf-8')

        # Asciidoc anchor
        write(self.genOpts.conventions.warning_comment, file=fp)
        write('// Include this no-xref version without cross reference id for multiple includes of same file', file=fp)
        write('[source,c++]', file=fp)
        write('----', file=fp)
        write(contents, file=fp)
        write('----', file=fp)
        fp.close()
def endTimer(timeit, msg):
    global startTime
    # Note: time.clock() was removed in Python 3.8; newer versions of this
    # helper use time.process_time() instead.
    endTime = time.clock()
    if timeit:
        write(msg, endTime - startTime, file=sys.stderr)
        startTime = None
def writeInclude(self):
    if self.threadsafety['parameters'] is not None:
        # Create file
        filename = self.genOpts.directory + '/parameters.txt'
        self.logMsg('diag', '# Generating include file:', filename)
        fp = open(filename, 'w', encoding='utf-8')

        # Host Synchronization
        write(self.genOpts.conventions.warning_comment, file=fp)
        write('.Externally Synchronized Parameters', file=fp)
        write('****', file=fp)
        write(self.threadsafety['parameters'], file=fp, end='')
        write('****', file=fp)
        write('', file=fp)
        fp.close()

    if self.threadsafety['parameterlists'] is not None:
        # Create file
        filename = self.genOpts.directory + '/parameterlists.txt'
        self.logMsg('diag', '# Generating include file:', filename)
        fp = open(filename, 'w', encoding='utf-8')

        # Host Synchronization
        write(self.genOpts.conventions.warning_comment, file=fp)
        write('.Externally Synchronized Parameter Lists', file=fp)
        write('****', file=fp)
        write(self.threadsafety['parameterlists'], file=fp, end='')
        write('****', file=fp)
        write('', file=fp)
        fp.close()

    if self.threadsafety['implicit'] is not None:
        # Create file
        filename = self.genOpts.directory + '/implicit.txt'
        self.logMsg('diag', '# Generating include file:', filename)
        fp = open(filename, 'w', encoding='utf-8')

        # Host Synchronization
        write(self.genOpts.conventions.warning_comment, file=fp)
        write('.Implicit Externally Synchronized Parameters', file=fp)
        write('****', file=fp)
        write(self.threadsafety['implicit'], file=fp, end='')
        write('****', file=fp)
        write('', file=fp)
        fp.close()
def endFile( self ): # write types.d file write( TYPES_HEADER.format( PACKAGE_PREFIX = self.genOpts.packagePrefix, HEADER_VERSION = self.headerVersion ) + self.typesFileContent, file = self.typesFile ) # write functions.d file write( "}}\n\n__gshared {{{GLOBAL_FUNCTION_DEFINITIONS}\n}}\n".format( GLOBAL_FUNCTION_DEFINITIONS = self.functionTypeDefinition ), file = self.funcsFile ) write( """\ /// if not using version "with-derelict-loader" this function must be called first /// sets vkCreateInstance function pointer and acquires basic functions to retrieve information about the implementation void loadGlobalLevelFunctions( typeof( vkGetInstanceProcAddr ) getProcAddr ) { vkGetInstanceProcAddr = getProcAddr; vkEnumerateInstanceExtensionProperties = cast( typeof( vkEnumerateInstanceExtensionProperties )) vkGetInstanceProcAddr( null, "vkEnumerateInstanceExtensionProperties" ); vkEnumerateInstanceLayerProperties = cast( typeof( vkEnumerateInstanceLayerProperties )) vkGetInstanceProcAddr( null, "vkEnumerateInstanceLayerProperties" ); vkCreateInstance = cast( typeof( vkCreateInstance )) vkGetInstanceProcAddr( null, "vkCreateInstance" ); } /// with a valid VkInstance call this function to retrieve additional VkInstance, VkPhysicalDevice, ... related functions void loadInstanceLevelFunctions( VkInstance instance ) { assert( vkGetInstanceProcAddr !is null, "Must call loadGlobalLevelFunctions before loadInstanceLevelFunctions" );\ """ + self.instanceLevelFunctions + """\n\ } /// with a valid VkInstance call this function to retrieve VkDevice, VkQueue and VkCommandBuffer related functions /// the functions call indirectly through the VkInstance and will be internally dispatched by the implementation /// use loadDeviceLevelFunctions( VkDevice device ) bellow to avoid this indirection and get the pointers directly form a VkDevice void loadDeviceLevelFunctions( VkInstance instance ) { assert( vkGetInstanceProcAddr !is null, "Must call loadInstanceLevelFunctions before loadDeviceLevelFunctions" );\ """ + self.deviceLevelFunctions.format( INSTANCE_OR_DEVICE = "Instance", instance_or_device = "instance" ) + """\n\ } /// with a valid VkDevice call this function to retrieve VkDevice, VkQueue and VkCommandBuffer related functions /// the functions call directly VkDevice and related resources and can be retrieved for one and only one VkDevice /// calling this function again with another VkDevices will overwrite the __gshared functions retrieved previously /// use createGroupedDeviceLevelFunctions bellow if usage of multiple VkDevices is required void loadDeviceLevelFunctions( VkDevice device ) { assert( vkGetDeviceProcAddr !is null, "Must call loadInstanceLevelFunctions before loadDeviceLevelFunctions" );\ """ + self.deviceLevelFunctions.format( INSTANCE_OR_DEVICE = "Device", instance_or_device = "device" ) + """\n\ } /// with a valid VkDevice call this function to retrieve VkDevice, VkQueue and VkCommandBuffer related functions grouped in a DispatchDevice struct /// the functions call directly VkDevice and related resources and can be retrieved for any VkDevice deprecated( \"Use DispatchDevice( VkDevice ) or DispatchDevice.loadDeviceLevelFunctions( VkDevice ) instead\" ) DispatchDevice createDispatchDeviceLevelFunctions( VkDevice device ) { return DispatchDevice( device ); } // struct to group per device deviceLevelFunctions into a custom namespace // keeps track of the device to which the functions are bound struct DispatchDevice { private VkDevice device = VK_NULL_HANDLE; VkCommandBuffer commandBuffer; // 
return copy of the internal VkDevice VkDevice vkDevice() { return device; } // Constructor forwards parameter 'device' to 'this.loadDeviceLevelFunctions' this( VkDevice device ) { this.loadDeviceLevelFunctions( device ); } // load the device level member functions // this also sets the private member 'device' to the passed in VkDevice // now the DispatchDevice can be used e.g.: // auto dd = DispatchDevice( device ); // dd.vkDestroyDevice( dd.vkDevice, pAllocator ); // convenience functions to omit the first arg do exist, see bellow void loadDeviceLevelFunctions( VkDevice device ) { assert( vkGetDeviceProcAddr !is null, "Must call loadInstanceLevelFunctions before loadDeviceLevelFunctions" ); this.device = device;\ """ + self.deviceLevelFunctions.format( INSTANCE_OR_DEVICE = "Device", instance_or_device = "device" ).replace( '\t', '\t\t' ).replace( '\t\t\t\t', '\t\t\t' ) + """\n\ } // Convenience member functions, forwarded to corresponding vulkan functions // If the first arg of the vulkan function is VkDevice it can be omitted // private 'DipatchDevice' member 'device' will be passed to the forwarded vulkan functions // the crux is that function pointers can't be overloaded with regular functions // hence the vk prefix is ditched for the convenience variants // e.g.: // auto dd = DispatchDevice( device ); // dd.DestroyDevice( pAllocator ); // instead of: dd.vkDestroyDevice( dd.vkDevice, pAllocator ); // // Same mechanism works with functions which require a VkCommandBuffer as first arg // In this case the public member 'commandBuffer' must be set beforehand // e.g.: // dd.commandBuffer = some_command_buffer; // dd.BeginCommandBuffer( &beginInfo ); // dd.CmdBindPipeline( VK_PIPELINE_BIND_POINT_GRAPHICS, some_pipeline ); // // Does not work with queues, there are just too few queue related functions""" + self.dispatchConvenienceFunctions + """\n\ // Member vulkan function decelerations{DISPATCH_FUNCTION_DEFINITIONS} }} // Derelict loader to acquire entry point vkGetInstanceProcAddr version( {NAME_PREFIX_UCASE}_FROM_DERELICT ) {{ import derelict.util.loader; import derelict.util.system; private {{ version( Windows ) enum libNames = "vulkan-1.dll"; else version( Posix ) enum libNames = "libvulkan.so.1"; else static assert( 0,"Need to implement Vulkan libNames for this operating system." ); }} class Derelict{NAME_PREFIX}Loader : SharedLibLoader {{ this() {{ super( libNames ); }} protected override void loadSymbols() {{ typeof( vkGetInstanceProcAddr ) getProcAddr; bindFunc( cast( void** )&getProcAddr, "vkGetInstanceProcAddr" ); loadGlobalLevelFunctions( getProcAddr ); }} }} __gshared Derelict{NAME_PREFIX}Loader Derelict{NAME_PREFIX}; shared static this() {{ Derelict{NAME_PREFIX} = new Derelict{NAME_PREFIX}Loader(); }} }} """.format( NAME_PREFIX = self.genOpts.namePrefix, NAME_PREFIX_UCASE = self.genOpts.namePrefix.upper(), DISPATCH_FUNCTION_DEFINITIONS = self.dispatchTypeDefinition ), file = self.funcsFile ) self.typesFile.close() self.funcsFile.close()
# Load & parse registry
reg = Registry()

startTimer(args.time)
tree = etree.parse(args.registry)
endTimer(args.time, "* Time to make ElementTree =")

startTimer(args.time)
reg.loadElementTree(tree)
endTimer(args.time, "* Time to parse ElementTree =")

if args.validate:
    reg.validateGroups()

if args.dump:
    write("* Dumping registry to regdump.txt", file=sys.stderr)
    reg.dumpReg(filehandle=open("regdump.txt", "w"))

# create error/warning & diagnostic files
if args.errfile:
    errWarn = open(args.errfile, "w")
else:
    errWarn = sys.stderr

if args.diagfile:
    diag = open(args.diagfile, "w")
else:
    diag = None

if args.debug:
    pdb.run("genTarget(args)")
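# The block above assumes the usual imports and helpers for these generator
# scripts (an assumption, not shown in the excerpt): pdb, sys, time,
# xml.etree.ElementTree as etree, Registry from reg, and write from generator.
# A minimal sketch of how the dispatch typically continues after the debug
# branch; the cProfile-based '-profile' handling here is illustrative only.
if args.debug:
    pdb.run('genTarget(args)')
elif args.profile:
    import cProfile
    import pstats
    cProfile.run('genTarget(args)', 'profile.txt')
    p = pstats.Stats('profile.txt')
    p.strip_dirs().sort_stats('time').print_stats(50)
else:
    genTarget(args)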
def endTimer(msg):
    # Relies on the module-level 'timeit' flag and the 'startTime' set by startTimer().
    global startTime
    endTime = time.clock()
    if timeit:
        write(msg, endTime - startTime)
        startTime = None
def endFile(self): self.extensions.sort() for ext in self.extensions: ext.makeMetafile(self.extensions) promotedExtensions = {} for ext in self.extensions: if ext.deprecationType == 'promotion' and ext.supercedingAPIVersion: promotedExtensions.setdefault(ext.supercedingAPIVersion, []).append(ext) for coreVersion, extensions in promotedExtensions.items(): promoted_extensions_fp = self.newFile(self.directory + '/promoted_extensions_' + coreVersion + self.file_suffix) for ext in extensions: indent = '' write(' * {blank}\n+\n' + ext.conditionalLinkExt(ext.name, indent), file=promoted_extensions_fp) promoted_extensions_fp.close() current_extensions_appendix_fp = self.newFile(self.directory + '/current_extensions_appendix' + self.file_suffix) deprecated_extensions_appendix_fp = self.newFile(self.directory + '/deprecated_extensions_appendix' + self.file_suffix) current_extension_appendices_fp = self.newFile(self.directory + '/current_extension_appendices' + self.file_suffix) current_extension_appendices_toc_fp = self.newFile(self.directory + '/current_extension_appendices_toc' + self.file_suffix) deprecated_extension_appendices_fp = self.newFile(self.directory + '/deprecated_extension_appendices' + self.file_suffix) deprecated_extension_appendices_toc_fp = self.newFile(self.directory + '/deprecated_extension_appendices_toc' + self.file_suffix) deprecated_extensions_guard_macro_fp = self.newFile(self.directory + '/deprecated_extensions_guard_macro' + self.file_suffix) provisional_extensions_appendix_fp = self.newFile(self.directory + '/provisional_extensions_appendix' + self.file_suffix) provisional_extension_appendices_fp = self.newFile(self.directory + '/provisional_extension_appendices' + self.file_suffix) provisional_extension_appendices_toc_fp = self.newFile(self.directory + '/provisional_extension_appendices_toc' + self.file_suffix) provisional_extensions_guard_macro_fp = self.newFile(self.directory + '/provisional_extensions_guard_macro' + self.file_suffix) write('include::deprecated_extensions_guard_macro' + self.file_suffix + '[]', file=current_extensions_appendix_fp) write('', file=current_extensions_appendix_fp) write('ifndef::HAS_DEPRECATED_EXTENSIONS[]', file=current_extensions_appendix_fp) write('[[extension-appendices-list]]', file=current_extensions_appendix_fp) write('== List of Extensions', file=current_extensions_appendix_fp) write('endif::HAS_DEPRECATED_EXTENSIONS[]', file=current_extensions_appendix_fp) write('ifdef::HAS_DEPRECATED_EXTENSIONS[]', file=current_extensions_appendix_fp) write('[[extension-appendices-list]]', file=current_extensions_appendix_fp) write('== List of Current Extensions', file=current_extensions_appendix_fp) write('endif::HAS_DEPRECATED_EXTENSIONS[]', file=current_extensions_appendix_fp) write('', file=current_extensions_appendix_fp) write('include::current_extension_appendices_toc' + self.file_suffix + '[]', file=current_extensions_appendix_fp) write('<<<', file=current_extensions_appendix_fp) write('include::current_extension_appendices' + self.file_suffix + '[]', file=current_extensions_appendix_fp) write('include::deprecated_extensions_guard_macro' + self.file_suffix + '[]', file=deprecated_extensions_appendix_fp) write('', file=deprecated_extensions_appendix_fp) write('ifdef::HAS_DEPRECATED_EXTENSIONS[]', file=deprecated_extensions_appendix_fp) write('[[deprecated-extension-appendices-list]]', file=deprecated_extensions_appendix_fp) write('== List of Deprecated Extensions', file=deprecated_extensions_appendix_fp) 
write('include::deprecated_extension_appendices_toc' + self.file_suffix + '[]', file=deprecated_extensions_appendix_fp) write('<<<', file=deprecated_extensions_appendix_fp) write('include::deprecated_extension_appendices' + self.file_suffix + '[]', file=deprecated_extensions_appendix_fp) write('endif::HAS_DEPRECATED_EXTENSIONS[]', file=deprecated_extensions_appendix_fp) # add include guard to allow multiple includes write('ifndef::DEPRECATED_EXTENSIONS_GUARD_MACRO_INCLUDE_GUARD[]', file=deprecated_extensions_guard_macro_fp) write(':DEPRECATED_EXTENSIONS_GUARD_MACRO_INCLUDE_GUARD:\n', file=deprecated_extensions_guard_macro_fp) write('ifndef::PROVISIONAL_EXTENSIONS_GUARD_MACRO_INCLUDE_GUARD[]', file=provisional_extensions_guard_macro_fp) write(':PROVISIONAL_EXTENSIONS_GUARD_MACRO_INCLUDE_GUARD:\n', file=provisional_extensions_guard_macro_fp) write('include::provisional_extensions_guard_macro' + self.file_suffix + '[]', file=provisional_extensions_appendix_fp) write('', file=provisional_extensions_appendix_fp) write('ifdef::HAS_PROVISIONAL_EXTENSIONS[]', file=provisional_extensions_appendix_fp) write('[[provisional-extension-appendices-list]]', file=provisional_extensions_appendix_fp) write('== List of Provisional Extensions', file=provisional_extensions_appendix_fp) write('include::provisional_extension_appendices_toc' + self.file_suffix + '[]', file=provisional_extensions_appendix_fp) write('<<<', file=provisional_extensions_appendix_fp) write('include::provisional_extension_appendices' + self.file_suffix + '[]', file=provisional_extensions_appendix_fp) write('endif::HAS_PROVISIONAL_EXTENSIONS[]', file=provisional_extensions_appendix_fp) for ext in self.extensions: include = 'include::../' + ext.name + self.file_suffix + '[]' link = ' * <<' + ext.name + '>>' if ext.provisional == 'true': write(self.conditionalExt(ext.name, include), file=provisional_extension_appendices_fp) write(self.conditionalExt(ext.name, link), file=provisional_extension_appendices_toc_fp) write(self.conditionalExt(ext.name, ':HAS_PROVISIONAL_EXTENSIONS:'), file=provisional_extensions_guard_macro_fp) elif ext.deprecationType is None: write(self.conditionalExt(ext.name, include), file=current_extension_appendices_fp) write(self.conditionalExt(ext.name, link), file=current_extension_appendices_toc_fp) else: condition = ext.supercedingAPIVersion if ext.supercedingAPIVersion else ext.supercedingExtension # potentially None too write(self.conditionalExt(ext.name, include, 'ifndef', condition), file=current_extension_appendices_fp) write(self.conditionalExt(ext.name, link, 'ifndef', condition), file=current_extension_appendices_toc_fp) write(self.conditionalExt(ext.name, include, 'ifdef', condition), file=deprecated_extension_appendices_fp) write(self.conditionalExt(ext.name, link, 'ifdef', condition), file=deprecated_extension_appendices_toc_fp) write(self.conditionalExt(ext.name, ':HAS_DEPRECATED_EXTENSIONS:', 'ifdef', condition), file=deprecated_extensions_guard_macro_fp) current_extensions_appendix_fp.close() deprecated_extensions_appendix_fp.close() current_extension_appendices_fp.close() current_extension_appendices_toc_fp.close() deprecated_extension_appendices_fp.close() deprecated_extension_appendices_toc_fp.close() write('endif::DEPRECATED_EXTENSIONS_GUARD_MACRO_INCLUDE_GUARD[]', file=deprecated_extensions_guard_macro_fp) deprecated_extensions_guard_macro_fp.close() OutputGenerator.endFile(self)
def makeMetafile(self, extensionsList, isRefpage=False): """Generate a file containing extension metainformation in asciidoctor markup form. - extensionsList - list of extensions spec is being generated against - isRefpage - True if generating a refpage include, False if generating a specification extension appendix include""" if isRefpage: filename = self.filename.replace('meta/', 'meta/refpage.') else: filename = self.filename fp = self.generator.newFile(filename) if not isRefpage: write('[[' + self.name + ']]', file=fp) write('=== ' + self.name, file=fp) write('', file=fp) self.writeTag('Name String', '`' + self.name + '`', isRefpage, fp) self.writeTag('Extension Type', self.typeToStr(), isRefpage, fp) self.writeTag('Registered Extension Number', self.number, isRefpage, fp) self.writeTag('Revision', self.revision, isRefpage, fp) # Only API extension dependencies are coded in XML, others are explicit self.writeTag('Extension and Version Dependencies', None, isRefpage, fp) write(' * Requires ' + self.conventions.api_name() + ' ' + self.requiresCore, file=fp) if self.requires: for dep in self.requires.split(','): write(' * Requires', self.conventions.formatExtension(dep), file=fp) if self.provisional == 'true': write( ' * *This is a _provisional_ extension and must: be used with caution.', file=fp) write( ' See the ' + self.specLink(xrefName='boilerplate-provisional-header', xrefText='description', isRefpage=isRefpage) + ' of provisional header files for enablement and stability details.*', file=fp) write('', file=fp) if self.deprecationType: self.writeTag('Deprecation state', None, isRefpage, fp) if self.deprecationType == 'promotion': if self.supercedingAPIVersion: write(' * _Promoted_ to\n' + self.conditionalLinkCoreAPI( self.supercedingAPIVersion, '-promotions', isRefpage), file=fp) else: # ext.supercedingExtension write(' * _Promoted_ to\n' + self.conditionalLinkExt(self.supercedingExtension) + ' extension', file=fp) self.resolveDeprecationChain(extensionsList, self.supercedingExtension, isRefpage, fp) elif self.deprecationType == 'deprecation': if self.supercedingAPIVersion: write(' * _Deprecated_ by\n' + self.conditionalLinkCoreAPI( self.supercedingAPIVersion, '-new-features', isRefpage), file=fp) elif self.supercedingExtension: write(' * _Deprecated_ by\n' + self.conditionalLinkExt(self.supercedingExtension) + ' extension', file=fp) self.resolveDeprecationChain(extensionsList, self.supercedingExtension, isRefpage, fp) else: write(' * _Deprecated_ without replacement', file=fp) elif self.deprecationType == 'obsoletion': if self.supercedingAPIVersion: write(' * _Obsoleted_ by\n' + self.conditionalLinkCoreAPI( self.supercedingAPIVersion, '-new-features', isRefpage), file=fp) elif self.supercedingExtension: write(' * _Obsoleted_ by\n' + self.conditionalLinkExt(self.supercedingExtension) + ' extension', file=fp) self.resolveDeprecationChain(extensionsList, self.supercedingExtension, isRefpage, fp) else: # TODO: Does not make sense to retroactively ban use of extensions from 1.0. # Needs some tweaks to the semantics and this message, when such extension(s) occur. write(' * _Obsoleted_ without replacement', file=fp) else: # should be unreachable self.generator.logMsg( 'error', 'Logic error in makeMetafile(): deprecationType is neither \'promotion\', \'deprecation\' nor \'obsoletion\'!' 
) if self.specialuse is not None: specialuses = self.specialuse.split(',') if len(specialuses) > 1: header = 'Special Uses' else: header = 'Special Use' self.writeTag(header, None, isRefpage, fp) for use in specialuses: # Each specialuse attribute value expands an asciidoctor # attribute of the same name, instead of using the shorter, # and harder to understand attribute write('* {}'.format( self.specLink( xrefName=self.conventions.special_use_section_anchor, xrefText='{' + use + '}', isRefpage=isRefpage)), file=fp) if self.conventions.write_contacts and not isRefpage: write('*Contact*::', file=fp) contacts = self.contact.split(',') for contact in contacts: contactWords = contact.strip().split() name = ' '.join(contactWords[:-1]) handle = contactWords[-1] if handle.startswith('gitlab:'): prettyHandle = 'icon:gitlab[alt=GitLab, role="red"]' + handle.replace( 'gitlab:@', '') elif handle.startswith('@'): trackerLink = 'link:++https://github.com/KhronosGroup/Vulkan-Docs/issues/new?title=' + self.name + ':%20&body=' + handle + '%20++' prettyHandle = trackerLink + '[icon:github[alt=GitHub, role="black"]' + handle[ 1:] + ']' else: prettyHandle = handle write(' * ' + name + ' ' + prettyHandle, file=fp) fp.close()
def newFile(self, filename):
    self.logMsg('diag', '# Generating include file:', filename)
    fp = open(filename, 'w', encoding='utf-8')
    write(self.genOpts.conventions.warning_comment, file=fp)
    return fp
def outputExtensionInclude(self, name, vendor, deprecation_type, conditions,
                           current_fp, current_toc_fp,
                           deprecated_fp, deprecated_toc_fp, guard_fp):
    include = 'include::../' + vendor.lower() + '/' + name[3:].lower() + self.file_suffix + '[]'
    link = ' * <<' + name + '>>'
    if deprecation_type is None:
        write(self.conditionalExt(name, include), file=current_fp)
        write(self.conditionalExt(name, link), file=current_toc_fp)
    else:
        write(self.conditionalExt(name, include, 'ifndef', conditions), file=current_fp)
        write(self.conditionalExt(name, link, 'ifndef', conditions), file=current_toc_fp)

        write(self.conditionalExt(name, include, 'ifdef', conditions), file=deprecated_fp)
        write(self.conditionalExt(name, link, 'ifdef', conditions), file=deprecated_toc_fp)

        write(self.conditionalExt(name, ':HAS_DEPRECATED_EXTENSIONS:', 'ifdef', conditions), file=guard_fp)
def makeMetafile(self, extensionsList): fp = self.generator.newFile(self.filename) write('[[' + self.name + ']]', file=fp) write('=== ' + self.name, file=fp) write('', file=fp) write('*Name String*::', file=fp) write(' `' + self.name + '`', file=fp) write('*Extension Type*::', file=fp) write(' ' + self.typeToStr(), file=fp) write('*Registered Extension Number*::', file=fp) write(' ' + self.number, file=fp) write('*Revision*::', file=fp) write(' ' + self.revision, file=fp) # Only API extension dependencies are coded in XML, others are explicit write('*Extension and Version Dependencies*::', file=fp) write(' * Requires ' + self.conventions.api_name + ' ' + self.requiresCore, file=fp) if self.requires: for dep in self.requires.split(','): write(' * Requires `<<' + dep + '>>`', file=fp) if self.deprecationType: write('*Deprecation state*::', file=fp) if self.deprecationType == 'promotion': if self.supercedingAPIVersion: write(' * _Promoted_ to\n' + self.conditionalLinkCoreAPI(self.supercedingAPIVersion, '-promotions'), file=fp) else: # ext.supercedingExtension write(' * _Promoted_ to\n' + self.conditionalLinkExt(self.supercedingExtension) + ' extension', file=fp) self.resolveDeprecationChain(extensionsList, self.supercedingExtension, fp) elif self.deprecationType == 'deprecation': if self.supercedingAPIVersion: write(' * _Deprecated_ by\n' + self.conditionalLinkCoreAPI(self.supercedingAPIVersion, '-new-features'), file=fp) elif self.supercedingExtension: write(' * _Deprecated_ by\n' + self.conditionalLinkExt(self.supercedingExtension) + ' extension' , file=fp) self.resolveDeprecationChain(extensionsList, self.supercedingExtension, fp) else: write(' * _Deprecated_ without replacement' , file=fp) elif self.deprecationType == 'obsoletion': if self.supercedingAPIVersion: write(' * _Obsoleted_ by\n' + self.conditionalLinkCoreAPI(self.supercedingAPIVersion, '-new-features'), file=fp) elif self.supercedingExtension: write(' * _Obsoleted_ by\n' + self.conditionalLinkExt(self.supercedingExtension) + ' extension' , file=fp) self.resolveDeprecationChain(extensionsList, self.supercedingExtension, fp) else: # TODO: Does not make sense to retroactively ban use of extensions from 1.0. # Needs some tweaks to the semantics and this message, when such extension(s) occur. 
write(' * _Obsoleted_ without replacement' , file=fp) else: # should be unreachable self.generator.logMsg('error', 'Logic error in makeMetafile(): deprecationType is neither \'promotion\', \'deprecation\' nor \'obsoletion\'!') if self.conventions.write_contacts: write('*Contact*::', file=fp) contacts = self.contact.split(',') for contact in contacts: contactWords = contact.strip().split() name = ' '.join(contactWords[:-1]) handle = contactWords[-1] if handle.startswith('gitlab:'): prettyHandle = 'icon:gitlab[alt=GitLab, role="red"]' + handle.replace('gitlab:@', '') elif handle.startswith('@'): trackerLink = 'link:++https://github.com/KhronosGroup/Vulkan-Docs/issues/new?title=' + self.name + ':%20&body=' + handle + '%20++' prettyHandle = trackerLink + '[icon:github[alt=GitHub, role="black"]' + handle[1:] + ']' else: prettyHandle = handle write(' * ' + name + ' ' + prettyHandle, file=fp) fp.close() if self.conventions.write_refpage_include: # Now make the refpage include fp = self.generator.newFile(self.filename.replace('meta/', 'meta/refpage.')) write('== Registered Extension Number', file=fp) write(self.number, file=fp) write('', file=fp) write('== Revision', file=fp) write(self.revision, file=fp) write('', file=fp) # Only API extension dependencies are coded in XML, others are explicit write('== Extension and Version Dependencies', file=fp) write(' * Requires ' + self.conventions.api_name + ' ' + self.requiresCore, file=fp) if self.requires: for dep in self.requires.split(','): write(' * Requires `<<' + dep + '>>`', file=fp) write('', file=fp) if self.deprecationType: write('== Deprecation state', file=fp) if self.deprecationType == 'promotion': if self.supercedingAPIVersion: write(' * _Promoted_ to\n' + self.conditionalLinkCoreAPI(self.supercedingAPIVersion, '-promotions'), file=fp) else: # ext.supercedingExtension write(' * _Promoted_ to\n' + self.conditionalLinkExt(self.supercedingExtension) + ' extension', file=fp) self.resolveDeprecationChain(extensionsList, self.supercedingExtension, fp) elif self.deprecationType == 'deprecation': if self.supercedingAPIVersion: write(' * _Deprecated_ by\n' + self.conditionalLinkCoreAPI(self.supercedingAPIVersion, '-new-features'), file=fp) elif self.supercedingExtension: write(' * _Deprecated_ by\n' + self.conditionalLinkExt(self.supercedingExtension) + ' extension' , file=fp) self.resolveDeprecationChain(extensionsList, self.supercedingExtension, fp) else: write(' * _Deprecated_ without replacement' , file=fp) elif self.deprecationType == 'obsoletion': if self.supercedingAPIVersion: write(' * _Obsoleted_ by\n' + self.conditionalLinkCoreAPI(self.supercedingAPIVersion, '-new-features'), file=fp) elif self.supercedingExtension: write(' * _Obsoleted_ by\n' + self.conditionalLinkExt(self.supercedingExtension) + ' extension' , file=fp) self.resolveDeprecationChain(extensionsList, self.supercedingExtension, fp) else: # TODO: Does not make sense to retroactively ban use of extensions from 1.0. # Needs some tweaks to the semantics and this message, when such extension(s) occur. write(' * _Obsoleted_ without replacement' , file=fp) else: # should be unreachable self.generator.logMsg('error', 'Logic error in makeMetafile(): deprecationType is neither \'promotion\', \'deprecation\' nor \'obsoletion\'!') write('', file=fp) fp.close()
def resolveDeprecationChain(self, extensionsList, succeededBy, file):
    ext = next(x for x in extensionsList if x.name == succeededBy)

    if ext.deprecationType:
        if ext.deprecationType == 'promotion':
            if ext.supercedingAPIVersion:
                write(' ** Which in turn was _promoted_ to\n' +
                      ext.conditionalLinkCoreAPI(ext.supercedingAPIVersion, '-promotions'),
                      file=file)
            else:  # ext.supercedingExtension
                write(' ** Which in turn was _promoted_ to extension\n' +
                      ext.conditionalLinkExt(ext.supercedingExtension),
                      file=file)
                ext.resolveDeprecationChain(extensionsList, ext.supercedingExtension, file)
        elif ext.deprecationType == 'deprecation':
            if ext.supercedingAPIVersion:
                write(' ** Which in turn was _deprecated_ by\n' +
                      ext.conditionalLinkCoreAPI(ext.supercedingAPIVersion, '-new-feature'),
                      file=file)
            elif ext.supercedingExtension:
                write(' ** Which in turn was _deprecated_ by\n' +
                      ext.conditionalLinkExt(ext.supercedingExtension) + ' extension',
                      file=file)
                ext.resolveDeprecationChain(extensionsList, ext.supercedingExtension, file)
            else:
                write(' ** Which in turn was _deprecated_ without replacement', file=file)
        elif ext.deprecationType == 'obsoletion':
            if ext.supercedingAPIVersion:
                write(' ** Which in turn was _obsoleted_ by\n' +
                      ext.conditionalLinkCoreAPI(ext.supercedingAPIVersion, '-new-feature'),
                      file=file)
            elif ext.supercedingExtension:
                write(' ** Which in turn was _obsoleted_ by\n' +
                      ext.conditionalLinkExt(ext.supercedingExtension) + ' extension',
                      file=file)
                ext.resolveDeprecationChain(extensionsList, ext.supercedingExtension, file)
            else:
                write(' ** Which in turn was _obsoleted_ without replacement', file=file)
    else:
        # should be unreachable
        self.generator.logMsg('error',
                              'Logic error in resolveDeprecationChain(): deprecationType is '
                              'neither \'promotion\', \'deprecation\' nor \'obsoletion\'!')
def beginFile(self, genOpts):
    OutputGenerator.beginFile(self, genOpts)
    # C-specific
    #
    # Multiple inclusion protection & C++ wrappers.
    if genOpts.protectFile and self.genOpts.filename:
        headerSym = re.sub(r'\.h', '_h_',
                           os.path.basename(self.genOpts.filename)).upper()
        write('#ifndef', headerSym, file=self.outFile)
        write('#define', headerSym, '1', file=self.outFile)
        self.newline()

    write('#ifdef __cplusplus', file=self.outFile)
    write('extern "C" {', file=self.outFile)
    write('#endif', file=self.outFile)
    self.newline()

    # User-supplied prefix text, if any (list of strings)
    if genOpts.prefixText:
        for s in genOpts.prefixText:
            write(s, file=self.outFile)
def makeMetafile(self, extensionsList): fp = self.generator.newFile(self.filename) write('[[' + self.name + ']]', file=fp) write('=== ' + self.name, file=fp) write('', file=fp) write('*Name String*::', file=fp) write(' `' + self.name + '`', file=fp) write('*Extension Type*::', file=fp) write(' ' + self.typeToStr(), file=fp) write('*Registered Extension Number*::', file=fp) write(' ' + self.number, file=fp) write('*Revision*::', file=fp) write(' ' + self.revision, file=fp) # Only API extension dependencies are coded in XML, others are explicit write('*Extension and Version Dependencies*::', file=fp) write(' * Requires ' + self.conventions.api_name() + ' ' + self.requiresCore, file=fp) if self.requires: for dep in self.requires.split(','): write(' * Requires `<<' + dep + '>>`', file=fp) if self.deprecationType: write('*Deprecation state*::', file=fp) if self.deprecationType == 'promotion': if self.supercedingAPIVersion: write(' * _Promoted_ to\n' + self.conditionalLinkCoreAPI(self.supercedingAPIVersion, '-promotions'), file=fp) else: # ext.supercedingExtension write(' * _Promoted_ to\n' + self.conditionalLinkExt(self.supercedingExtension) + ' extension', file=fp) self.resolveDeprecationChain(extensionsList, self.supercedingExtension, fp) elif self.deprecationType == 'deprecation': if self.supercedingAPIVersion: write(' * _Deprecated_ by\n' + self.conditionalLinkCoreAPI(self.supercedingAPIVersion, '-new-features'), file=fp) elif self.supercedingExtension: write(' * _Deprecated_ by\n' + self.conditionalLinkExt(self.supercedingExtension) + ' extension' , file=fp) self.resolveDeprecationChain(extensionsList, self.supercedingExtension, fp) else: write(' * _Deprecated_ without replacement' , file=fp) elif self.deprecationType == 'obsoletion': if self.supercedingAPIVersion: write(' * _Obsoleted_ by\n' + self.conditionalLinkCoreAPI(self.supercedingAPIVersion, '-new-features'), file=fp) elif self.supercedingExtension: write(' * _Obsoleted_ by\n' + self.conditionalLinkExt(self.supercedingExtension) + ' extension' , file=fp) self.resolveDeprecationChain(extensionsList, self.supercedingExtension, fp) else: # TODO: Does not make sense to retroactively ban use of extensions from 1.0. # Needs some tweaks to the semantics and this message, when such extension(s) occur. 
write(' * _Obsoleted_ without replacement' , file=fp) else: # should be unreachable self.generator.logMsg('error', 'Logic error in makeMetafile(): deprecationType is neither \'promotion\', \'deprecation\' nor \'obsoletion\'!') if self.conventions.write_contacts: write('*Contact*::', file=fp) contacts = self.contact.split(',') for contact in contacts: contactWords = contact.strip().split() name = ' '.join(contactWords[:-1]) handle = contactWords[-1] if handle.startswith('gitlab:'): prettyHandle = 'icon:gitlab[alt=GitLab, role="red"]' + handle.replace('gitlab:@', '') elif handle.startswith('@'): trackerLink = 'link:++https://github.com/KhronosGroup/Vulkan-Docs/issues/new?title=' + self.name + ':%20&body=' + handle + '%20++' prettyHandle = trackerLink + '[icon:github[alt=GitHub, role="black"]' + handle[1:] + ']' else: prettyHandle = handle write(' * ' + name + ' ' + prettyHandle, file=fp) fp.close() if self.conventions.write_refpage_include: # Now make the refpage include fp = self.generator.newFile(self.filename.replace('meta/', 'meta/refpage.')) write('== Registered Extension Number', file=fp) write(self.number, file=fp) write('', file=fp) write('== Revision', file=fp) write(self.revision, file=fp) write('', file=fp) # Only API extension dependencies are coded in XML, others are explicit write('== Extension and Version Dependencies', file=fp) write(' * Requires ' + self.conventions.api_name() + ' ' + self.requiresCore, file=fp) if self.requires: for dep in self.requires.split(','): write(' * Requires `<<' + dep + '>>`', file=fp) write('', file=fp) if self.deprecationType: write('== Deprecation state', file=fp) if self.deprecationType == 'promotion': if self.supercedingAPIVersion: write(' * _Promoted_ to\n' + self.conditionalLinkCoreAPI(self.supercedingAPIVersion, '-promotions'), file=fp) else: # ext.supercedingExtension write(' * _Promoted_ to\n' + self.conditionalLinkExt(self.supercedingExtension) + ' extension', file=fp) self.resolveDeprecationChain(extensionsList, self.supercedingExtension, fp) elif self.deprecationType == 'deprecation': if self.supercedingAPIVersion: write(' * _Deprecated_ by\n' + self.conditionalLinkCoreAPI(self.supercedingAPIVersion, '-new-features'), file=fp) elif self.supercedingExtension: write(' * _Deprecated_ by\n' + self.conditionalLinkExt(self.supercedingExtension) + ' extension' , file=fp) self.resolveDeprecationChain(extensionsList, self.supercedingExtension, fp) else: write(' * _Deprecated_ without replacement' , file=fp) elif self.deprecationType == 'obsoletion': if self.supercedingAPIVersion: write(' * _Obsoleted_ by\n' + self.conditionalLinkCoreAPI(self.supercedingAPIVersion, '-new-features'), file=fp) elif self.supercedingExtension: write(' * _Obsoleted_ by\n' + self.conditionalLinkExt(self.supercedingExtension) + ' extension' , file=fp) self.resolveDeprecationChain(extensionsList, self.supercedingExtension, fp) else: # TODO: Does not make sense to retroactively ban use of extensions from 1.0. # Needs some tweaks to the semantics and this message, when such extension(s) occur. write(' * _Obsoleted_ without replacement' , file=fp) else: # should be unreachable self.generator.logMsg('error', 'Logic error in makeMetafile(): deprecationType is neither \'promotion\', \'deprecation\' nor \'obsoletion\'!') write('', file=fp) fp.close()
def genTarget(args):
    # Create generator options with specified parameters
    makeGenOpts(args)

    if args.target in genOpts:
        createGenerator = genOpts[args.target][0]
        options = genOpts[args.target][1]

        if not args.quiet:
            write('* Building', options.filename, file=sys.stderr)
            write('* options.versions =', options.versions, file=sys.stderr)
            write('* options.emitversions =', options.emitversions, file=sys.stderr)
            write('* options.defaultExtensions =', options.defaultExtensions, file=sys.stderr)
            write('* options.addExtensions =', options.addExtensions, file=sys.stderr)
            write('* options.removeExtensions =', options.removeExtensions, file=sys.stderr)
            write('* options.emitExtensions =', options.emitExtensions, file=sys.stderr)

        gen = createGenerator(errFile=errWarn, warnFile=errWarn, diagFile=diag)
        return (gen, options)
    else:
        write('No generator options for unknown target:', args.target, file=sys.stderr)
        return None
def genTarget(args): """Generate a target based on the options in the matching genOpts{} object. This is encapsulated in a function so it can be profiled and/or timed. The args parameter is an parsed argument object containing the following fields that are used: - target - target to generate - directory - directory to generate it in - protect - True if re-inclusion wrappers should be created - extensions - list of additional extensions to include in generated interfaces""" # Create generator options with specified parameters makeGenOpts(args) if args.target in genOpts: createGenerator = genOpts[args.target][0] options = genOpts[args.target][1] if not args.quiet: write('* Building', options.filename, file=sys.stderr) write('* options.versions =', options.versions, file=sys.stderr) write('* options.emitversions =', options.emitversions, file=sys.stderr) write('* options.defaultExtensions =', options.defaultExtensions, file=sys.stderr) write('* options.addExtensions =', options.addExtensions, file=sys.stderr) write('* options.removeExtensions =', options.removeExtensions, file=sys.stderr) write('* options.emitExtensions =', options.emitExtensions, file=sys.stderr) startTimer(args.time) gen = createGenerator(errFile=errWarn, warnFile=errWarn, diagFile=diag) reg.setGenerator(gen) reg.apiGen(options) if not args.quiet: write('* Generated', options.filename, file=sys.stderr) endTimer(args.time, '* Time to generate ' + options.filename + ' =') else: write('No generator options for unknown target:', args.target, file=sys.stderr)
def endTimer(timeit, msg):
    global startTime
    if timeit:
        endTime = time.process_time()
        write(msg, endTime - startTime, file=sys.stderr)
        startTime = None
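# The matching startTimer() is presumably the usual counterpart; a minimal
# sketch, assuming the same module-level 'startTime':
def startTimer(timeit):
    global startTime
    if timeit:
        startTime = time.process_time()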
def endFile(self): # Find all of the extensions that use the system types self.sysTypes = set() for node in self.registry.reg.find('types').findall('type'): if node.get('category') is None and node.get('requires') in self.includes and node.get('requires') != 'vk_platform' or \ (node.find('name') is not None and node.find('name').text in DEFINE_TYPES): #Handle system types that are '#define'd in spec for extension in self.extTypes: for structName in self.extTypes[extension].vktypes: for struct in self.structs: if struct.name == structName: for member in struct.members: if node.get( 'name' ) == member.baseType or node.get( 'name') + '*' == member.baseType: sysType = VulkanSystemType( node.get('name'), self.extTypes[structName]) if sysType not in self.sysTypes: self.sysTypes.add(sysType) for funcName in self.extTypes[extension].vkfuncs: for func in self.functions: if func.name == funcName: for param in func.parameters: if node.get( 'name' ) == param.baseType or node.get( 'name') + '*' == param.baseType: sysType = VulkanSystemType( node.get('name'), self.extFuncs[funcName]) if sysType not in self.sysTypes: self.sysTypes.add(sysType) # Find every @foreach, @if, and @end forIter = re.finditer( '(^\\s*\\@foreach\\s+[a-z]+(\\s+where\\(.*\\))?\\s*^)|(\\@foreach [a-z]+(\\s+where\\(.*\\))?\\b)', self.format, flags=re.MULTILINE) ifIter = re.finditer('(^\\s*\\@if\\(.*\\)\\s*^)|(\\@if\\(.*\\))', self.format, flags=re.MULTILINE) endIter = re.finditer( '(^\\s*\\@end\\s+[a-z]+\\s*^)|(\\@end [a-z]+\\b)', self.format, flags=re.MULTILINE) try: nextFor = next(forIter) except StopIteration: nextFor = None try: nextIf = next(ifIter) except StopIteration: nextIf = None try: nextEnd = next(endIter) except StopIteration: nextEnd = None # Match the beginnings to the ends loops = [] unassignedControls = [] depth = 0 while nextFor is not None or nextFor is not None or nextEnd is not None: # If this is a @foreach if nextFor is not None and ( (nextIf is None or nextFor.start() < nextIf.start()) and nextFor.start() < nextEnd.start()): depth += 1 forType = re.search('(?<=\\s)[a-z]+', self.format[nextFor.start():nextFor.end()]) text = self.format[forType.start() + nextFor.start():forType.end() + nextFor.start()] whereMatch = re.search( '(?<=where\\().*(?=\\))', self.format[nextFor.start():nextFor.end()]) condition = None if whereMatch is None else self.format[ whereMatch.start() + nextFor.start():whereMatch.end() + nextFor.start()] unassignedControls.append( (nextFor.start(), nextFor.end(), text, condition)) try: nextFor = next(forIter) except StopIteration: nextFor = None # If this is an @if elif nextIf is not None and nextIf.start() < nextEnd.start(): depth += 1 condMatch = re.search('(?<=if\\().*(?=\\))', self.format[nextIf.start():nextIf.end()]) condition = None if condMatch is None else self.format[ condMatch.start() + nextIf.start():condMatch.end() + nextIf.start()] unassignedControls.append( (nextIf.start(), nextIf.end(), 'if', condition)) try: nextIf = next(ifIter) except StopIteration: nextIf = None # Else this is an @end else: depth -= 1 endType = re.search('(?<=\\s)[a-z]+', self.format[nextEnd.start():nextEnd.end()]) text = self.format[endType.start() + nextEnd.start():endType.end() + nextEnd.start()] start = unassignedControls.pop(-1) assert (start[2] == text) item = Control(self.format, start[0:2], (nextEnd.start(), nextEnd.end()), text, start[3]) if len(loops) < 1 or depth < loops[-1][0]: while len(loops) > 0 and depth < loops[-1][0]: item.children.insert(0, loops.pop(-1)[1]) loops.append((depth, item)) else: 
loops.append((depth, item)) try: nextEnd = next(endIter) except StopIteration: nextEnd = None # Expand each loop into its full form lastIndex = 0 for _, loop in loops: gen.write(self.format[lastIndex:loop.startPos[0]].format(**{}), file=self.outFile) gen.write(self.expand(loop), file=self.outFile) lastIndex = loop.endPos[1] gen.write(self.format[lastIndex:-1].format(**{}), file=self.outFile) gen.OutputGenerator.endFile(self)
protect = True
target = None
timeit = False
validate = False

# Default input / log files
errFilename = None
diagFilename = 'diag.txt'
regFilename = 'vk.xml'

if __name__ == '__main__':
    i = 1
    while (i < len(sys.argv)):
        arg = sys.argv[i]
        i = i + 1

        if (arg == '-debug'):
            write('Enabling debug (-debug)', file=sys.stderr)
            debug = True
        elif (arg == '-dump'):
            write('Enabling dump (-dump)', file=sys.stderr)
            dump = True
        elif (arg == '-noprotect'):
            write('Disabling inclusion protection in output headers', file=sys.stderr)
            protect = False
        elif (arg == '-profile'):
            write('Enabling profiling (-profile)', file=sys.stderr)
            profile = True
        elif (arg == '-registry'):
            regFilename = sys.argv[i]
            i = i + 1
            write('Using registry ', regFilename, file=sys.stderr)
        elif (arg == '-time'):
def endFeature(self): "Actually write the interface to the output file." # C-specific if self.emit: if self.feature_not_empty: if self.genOpts.conventions.writeFeature( self.featureExtraProtect, self.genOpts.filename): self.newline() if self.genOpts.protectFeature: write('#ifndef', self.featureName, file=self.outFile) # If type declarations are needed by other features based on # this one, it may be necessary to suppress the ExtraProtect, # or move it below the 'for section...' loop. if self.featureExtraProtect is not None: write('#ifdef', self.featureExtraProtect, file=self.outFile) self.newline() write('#define', self.featureName, '1', file=self.outFile) for section in self.TYPE_SECTIONS: contents = self.sections[section] if contents: write('\n'.join(contents), file=self.outFile) if self.genOpts.genFuncPointers and self.sections[ 'commandPointer']: write('\n'.join(self.sections['commandPointer']), file=self.outFile) self.newline() if self.sections['command']: if self.genOpts.protectProto: write(self.genOpts.protectProto, self.genOpts.protectProtoStr, file=self.outFile) write('\n'.join(self.sections['command']), end='', file=self.outFile) if self.genOpts.protectProto: write('#endif', file=self.outFile) else: self.newline() if self.featureExtraProtect is not None: write('#endif /*', self.featureExtraProtect, '*/', file=self.outFile) if self.genOpts.protectFeature: write('#endif /*', self.featureName, '*/', file=self.outFile) # Finish processing in superclass OutputGenerator.endFeature(self)
def writeInclude(self, directory, basename, contents):
    # Create subdirectory, if needed
    directory = self.genOpts.directory + '/' + directory
    self.makeDir(directory)

    # Create file
    filename = directory + '/' + basename + '.txt'
    self.logMsg('diag', '# Generating include file:', filename)
    fp = open(filename, 'w', encoding='utf-8')

    # Asciidoc anchor
    write(self.genOpts.conventions.warning_comment, file=fp)
    write('[[{0},{0}]]'.format(basename), file=fp)
    write('[source,c++]', file=fp)
    write('----', file=fp)
    write(contents, file=fp)
    write('----', file=fp)
    fp.close()

    if self.genOpts.secondaryInclude:
        # Create secondary no cross-reference include file
        filename = directory + '/' + basename + '.no-xref.txt'
        self.logMsg('diag', '# Generating include file:', filename)
        fp = open(filename, 'w', encoding='utf-8')

        # Asciidoc anchor
        write(self.genOpts.conventions.warning_comment, file=fp)
        write('// Include this no-xref version without cross reference id for multiple includes of same file', file=fp)
        write('[source,c++]', file=fp)
        write('----', file=fp)
        write(contents, file=fp)
        write('----', file=fp)
        fp.close()
def genTarget(args):
    global genOpts

    # Create generator options with parameters specified on command line
    makeGenOpts(args)

    # Select a generator matching the requested target
    if args.target in genOpts.keys():
        createGenerator = genOpts[args.target][0]
        options = genOpts[args.target][1]

        if not args.quiet:
            write('* Building', options.filename, file=sys.stderr)
            write('* options.versions =', options.versions, file=sys.stderr)
            write('* options.emitversions =', options.emitversions, file=sys.stderr)
            write('* options.defaultExtensions =', options.defaultExtensions, file=sys.stderr)
            write('* options.addExtensions =', options.addExtensions, file=sys.stderr)
            write('* options.removeExtensions =', options.removeExtensions, file=sys.stderr)
            write('* options.emitExtensions =', options.emitExtensions, file=sys.stderr)

        gen = createGenerator(errFile=errWarn, warnFile=errWarn, diagFile=diag)

        if not args.quiet:
            write('* Generated', options.filename, file=sys.stderr)
        return (gen, options)
    else:
        write('No generator options for unknown target:', args.target, file=sys.stderr)
        return None
def genTarget(args):
    global genOpts

    # Create generator options with specified parameters
    makeGenOpts(args)

    if args.target in genOpts.keys():
        createGenerator = genOpts[args.target][0]
        options = genOpts[args.target][1]

        if not args.quiet:
            write('* Building', options.filename, file=sys.stderr)
            write('* options.versions =', options.versions, file=sys.stderr)
            write('* options.emitversions =', options.emitversions, file=sys.stderr)
            write('* options.defaultExtensions =', options.defaultExtensions, file=sys.stderr)
            write('* options.addExtensions =', options.addExtensions, file=sys.stderr)
            write('* options.removeExtensions =', options.removeExtensions, file=sys.stderr)
            write('* options.emitExtensions =', options.emitExtensions, file=sys.stderr)

        startTimer(args.time)
        gen = createGenerator(errFile=errWarn, warnFile=errWarn, diagFile=diag)
        reg.setGenerator(gen)
        reg.apiGen(options)

        if not args.quiet:
            write('* Generated', options.filename, file=sys.stderr)
        endTimer(args.time, '* Time to generate ' + options.filename + ' =')
    else:
        write('No generator options for unknown target:', args.target, file=sys.stderr)
def endFeature(self):
    # C-specific
    # Actually write the interface to the output file.
    if self.emit:
        if self.feature_not_empty:
            if self.genOpts.conventions.writeFeature(self.featureExtraProtect, self.genOpts.filename):
                self.newline()
                if self.genOpts.protectFeature:
                    write('#ifndef', self.featureName, file=self.outFile)

                # If type declarations are needed by other features based on
                # this one, it may be necessary to suppress the ExtraProtect,
                # or move it below the 'for section...' loop.
                if self.featureExtraProtect is not None:
                    write('#ifdef', self.featureExtraProtect, file=self.outFile)
                self.newline()

                write('#define', self.featureName, '1', file=self.outFile)
                for section in self.TYPE_SECTIONS:
                    # OpenXR:
                    # If we need the explicit include of the external platform header,
                    # put it right before the function pointer definitions
                    if section == "funcpointer" and self.need_platform_include:
                        write('// Include for OpenXR Platform-Specific Types', file=self.outFile)
                        write('#include "openxr_platform.h"', file=self.outFile)
                        self.newline()
                        self.need_platform_include = False

                    contents = self.sections[section]
                    if contents:
                        write('\n'.join(contents), file=self.outFile)

                if self.genOpts.genFuncPointers and self.sections['commandPointer']:
                    write('\n'.join(self.sections['commandPointer']), file=self.outFile)
                    self.newline()

                if self.sections['command']:
                    if self.genOpts.protectProto:
                        write(self.genOpts.protectProto, self.genOpts.protectProtoStr, file=self.outFile)
                    write('\n'.join(self.sections['command']), end='', file=self.outFile)
                    if self.genOpts.protectProto:
                        write('#endif', file=self.outFile)
                    else:
                        self.newline()

                if self.featureExtraProtect is not None:
                    write('#endif /*', self.featureExtraProtect, '*/', file=self.outFile)
                if self.genOpts.protectFeature:
                    write('#endif /*', self.featureName, '*/', file=self.outFile)

    # Finish processing in superclass
    OutputGenerator.endFeature(self)
def outputGeneratedHeaderWarning(self):
    generated_warning  = '// *********** THIS FILE IS GENERATED - DO NOT EDIT ***********\n'
    generated_warning += '// See utility_source_generator.py for modifications\n'
    generated_warning += '// ************************************************************\n'
    write(generated_warning, file=self.outFile)
def endFile(self): # write types.d file write(TYPES_HEADER.format(PACKAGE_PREFIX = self.genOpts.packagePrefix, HEADER_VERSION = self.headerVersion) + self.typesFileContent, file=self.typesFile) # write functions.d file write("}}\n\n__gshared {{{GLOBAL_FUNCTION_DEFINITIONS}\n}}\n".format(GLOBAL_FUNCTION_DEFINITIONS = self.functionTypeDefinition), file=self.funcsFile) write("""\ /// if not using version "with-derelict-loader" this function must be called first /// sets vkCreateInstance function pointer and acquires basic functions to retrieve information about the implementation void loadGlobalLevelFunctions(typeof(vkGetInstanceProcAddr) getProcAddr) { vkGetInstanceProcAddr = getProcAddr; vkEnumerateInstanceExtensionProperties = cast(typeof(vkEnumerateInstanceExtensionProperties)) vkGetInstanceProcAddr(null, "vkEnumerateInstanceExtensionProperties"); vkEnumerateInstanceLayerProperties = cast(typeof(vkEnumerateInstanceLayerProperties)) vkGetInstanceProcAddr(null, "vkEnumerateInstanceLayerProperties"); vkCreateInstance = cast(typeof(vkCreateInstance)) vkGetInstanceProcAddr(null, "vkCreateInstance"); } /// with a valid VkInstance call this function to retrieve additional VkInstance, VkPhysicalDevice, ... related functions void loadInstanceLevelFunctions(VkInstance instance) { assert(vkGetInstanceProcAddr !is null, "Must call loadGlobalLevelFunctions before loadInstanceLevelFunctions");\ """ + self.instanceLevelFunctions + """\ } /// with a valid VkInstance call this function to retrieve VkDevice, VkQueue and VkCommandBuffer related functions /// the functions call indirectly through the VkInstance and will be internally dispatched by the implementation /// use loadDeviceLevelFunctions(VkDevice device) bellow to avoid this indirection and get the pointers directly form a VkDevice void loadDeviceLevelFunctions(VkInstance instance) { assert(vkGetInstanceProcAddr !is null, "Must call loadInstanceLevelFunctions before loadDeviceLevelFunctions");\ """ + self.deviceLevelFunctions.format(INSTANCE_OR_DEVICE = "Instance", instance_or_device = "instance") + """\ } /// with a valid VkDevice call this function to retrieve VkDevice, VkQueue and VkCommandBuffer related functions /// the functions call directly VkDevice and related resources and can be retrieved for one and only one VkDevice /// calling this function again with another VkDevices will overwrite the __gshared functions retrieved previously /// use createGroupedDeviceLevelFunctions bellow if usage of multiple VkDevices is required void loadDeviceLevelFunctions(VkDevice device) { assert(vkGetDeviceProcAddr !is null, "Must call loadInstanceLevelFunctions before loadDeviceLevelFunctions");\ """ + self.deviceLevelFunctions.format(INSTANCE_OR_DEVICE = "Device", instance_or_device = "device") + """\ } /// with a valid VkDevice call this function to retrieve VkDevice, VkQueue and VkCommandBuffer related functions grouped in a DispatchDevice struct /// the functions call directly VkDevice and related resources and can be retrieved for any VkDevice DispatchDevice createDispatchDeviceLevelFunctions(VkDevice device) { assert(vkGetDeviceProcAddr !is null, "Must call loadInstanceLevelFunctions before loadDeviceLevelFunctions"); DispatchDevice dispatchDevice; with(dispatchDevice) {\ """ + self.deviceLevelFunctions.format(INSTANCE_OR_DEVICE = "Device", instance_or_device = "device").replace('\t\t', '\t\t\t') + """\ }} return dispatchDevice; }} // struct to group per device deviceLevelFunctions into a custom namespace private struct DispatchDevice 
{{{DISPATCH_FUNCTION_DEFINITIONS} }} // Derelict loader to acquire entry point vkGetInstanceProcAddr version({NAME_PREFIX_UCASE}_FROM_DERELICT) {{ import derelict.util.loader; import derelict.util.system; private {{ version(Windows) enum libNames = "vulkan-1.dll"; else version(Posix) enum libNames = "libvulkan.so.1"; else static assert(0,"Need to implement Vulkan libNames for this operating system."); }} class Derelict{NAME_PREFIX}Loader : SharedLibLoader {{ this() {{ super(libNames); }} protected override void loadSymbols() {{ typeof(vkGetInstanceProcAddr) getProcAddr; bindFunc(cast(void**)&getProcAddr, "vkGetInstanceProcAddr"); loadGlobalLevelFunctions(getProcAddr); }} }} __gshared Derelict{NAME_PREFIX}Loader Derelict{NAME_PREFIX}; shared static this() {{ Derelict{NAME_PREFIX} = new Derelict{NAME_PREFIX}Loader(); }} }} """.format( NAME_PREFIX = self.genOpts.namePrefix, NAME_PREFIX_UCASE = self.genOpts.namePrefix.upper(), DISPATCH_FUNCTION_DEFINITIONS = self.dispatchTypeDefinition), file=self.funcsFile) self.typesFile.close() self.funcsFile.close()
def beginFile(self, genOpts):
    OutputGenerator.beginFile(self, genOpts)
    # C-specific
    #
    # Multiple inclusion protection & C++ wrappers.
    if genOpts.protectFile and self.genOpts.filename:
        headerSym = re.sub(r'\.h', '_h_',
                           os.path.basename(self.genOpts.filename)).upper()
        write('#ifndef', headerSym, file=self.outFile)
        write('#define', headerSym, '1', file=self.outFile)
        self.newline()

    # User-supplied prefix text, if any (list of strings)
    if genOpts.prefixText:
        for s in genOpts.prefixText:
            write(s, file=self.outFile)

    # C++ extern wrapper - after prefix lines so they can add includes.
    self.newline()
    write('#ifdef __cplusplus', file=self.outFile)
    write('extern "C" {', file=self.outFile)
    write('#endif', file=self.outFile)
    self.newline()
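# Hedged, standalone sketch of the header framing that the beginFile() above
# emits (guard macro plus extern "C" block) together with the matching epilogue
# a corresponding endFile() writes. Guard-symbol derivation mirrors the re.sub()
# call above; the output file name and body content are illustrative only.
import os
import re

def write_guarded_header(path, body_lines):
    header_sym = re.sub(r'\.h', '_h_', os.path.basename(path)).upper()
    with open(path, 'w', encoding='utf-8') as fp:
        print('#ifndef', header_sym, file=fp)
        print('#define', header_sym, '1', file=fp)
        print('', file=fp)
        print('#ifdef __cplusplus', file=fp)
        print('extern "C" {', file=fp)
        print('#endif', file=fp)
        print('', file=fp)
        for line in body_lines:
            print(line, file=fp)
        print('', file=fp)
        print('#ifdef __cplusplus', file=fp)
        print('}', file=fp)
        print('#endif', file=fp)
        print('#endif', file=fp)

write_guarded_header('vulkan_sample.h',
                     ['typedef struct VkSampleStruct_T* VkSampleStruct;'])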
def endFeature(self): if self.emit: # first write all types into types.d # special treat for platform surface extension which get wraped into a version block extIndent = self.surfaceExtensionVersionIndent self.typesFileContent += "\n{0}\n".format(self.currentFeature) surfaceVersion = "" if self.isSurfaceExtension: surfaceVersion = "version({0}) {{".format(self.surfaceExtensions[self.currentFeature][0]) self.typesFileContent += "{0}\n\t{1}\n".format(surfaceVersion, self.surfaceExtensions[self.currentFeature][1]) isFirstSectionInFeature = True # for output file formating for section in self.TYPE_SECTIONS: # write contents of type section contents = self.sections[section] if contents: # check if opaque structs were registered and write tem into types file if section == 'struct': if self.opaqueStruct: for opaque in self.opaqueStruct: self.typesFileContent += "{1}struct {0};\n".format(opaque, extIndent) self.typesFileContent += '\n' elif not isFirstSectionInFeature: self.typesFileContent += '\n' # for output file formating isFirstSectionInFeature = False # write the rest of the contents, eg. enums, structs, etc. into types file for content in self.sections[section]: self.typesFileContent += "{1}{0}\n".format(content, extIndent) if self.isSurfaceExtension: self.typesFileContent += "}\n" # currently the commandPointer token is not used if self.genOpts.genFuncPointers and self.sections['commandPointer']: if self.isSurfaceExtension: write(surfaceVersion, file=self.funcsFile) write(extIndent + ('\n' + extIndent).join(self.sections['commandPointer']), file=self.funcsFile) if self.isSurfaceExtension: write("}", file=self.funcsFile) write('', file=self.funcsFile) # write function aliases into functions.d and build strings for later injection if self.sections['command']: # update indention of currentFeature for functions.d content self.currentFeature = "\t" + self.currentFeature; # write the aliases to function types write("\n{0}".format(self.currentFeature), file=self.funcsFile) if self.isSurfaceExtension: write("\t" + surfaceVersion, file=self.funcsFile) write(extIndent + ('\n' + extIndent).join(self.sections['command']), file=self.funcsFile) if self.isSurfaceExtension: write("\t}", file=self.funcsFile) # capture if function is a instance or device level function inInstanceLevelFuncNames = False inDeviceLevelFuncNames = False # comment the current feature self.functionTypeDefinition += "\n\n{0}".format(self.currentFeature) # surface extension version directive if self.isSurfaceExtension: self.functionTypeDefinition += "\n\t" + surfaceVersion # create string of functionTypes functionVars for command in self.sections['command']: name = self.functionTypeName[command] self.functionTypeDefinition += "\n\t{1}PFN_{0} {0};".format(name, extIndent) # query if the current function is in instance or deviceLevelFuncNames for the next step if not inInstanceLevelFuncNames and name in self.instanceLevelFuncNames: inInstanceLevelFuncNames = True if not inDeviceLevelFuncNames and name in self.deviceLevelFuncNames: inDeviceLevelFuncNames = True # surface extension version closing curly brace if self.isSurfaceExtension: self.functionTypeDefinition += "\n\t}" # create a strings to load instance level functions if inInstanceLevelFuncNames: # comment the current feature self.instanceLevelFunctions += "\n\n{0}".format(self.currentFeature) # surface extension version directive if self.isSurfaceExtension: self.instanceLevelFunctions += "\n\t" + surfaceVersion # set of global level function names, function pointers are ignored 
here are set in endFile method gloablLevelFuncNames = {"vkGetInstanceProcAddr", "vkEnumerateInstanceExtensionProperties", "vkEnumerateInstanceLayerProperties", "vkCreateInstance"} # build the commands for command in self.sections['command']: name = self.functionTypeName[command] if name in self.instanceLevelFuncNames and name not in gloablLevelFuncNames: self.instanceLevelFunctions += "\n\t{1}{0} = cast(typeof({0})) vkGetInstanceProcAddr(instance, \"{0}\");".format(name, extIndent) # surface extension version closing curly brace if self.isSurfaceExtension: self.instanceLevelFunctions += "\n\t}" # create a strings to load device level functions if inDeviceLevelFuncNames: # comment the current feature self.deviceLevelFunctions += "\n\n{0}".format(self.currentFeature) # surface extension version directive if self.isSurfaceExtension: self.deviceLevelFunctions += "\n\t" + surfaceVersion # build the commands for command in self.sections['command']: name = self.functionTypeName[command] if name in self.deviceLevelFuncNames: self.deviceLevelFunctions += "\n\t{1}{0} = cast(typeof({0})) vkGet{{INSTANCE_OR_DEVICE}}ProcAddr({{instance_or_device}}, \"{0}\");".format(name, extIndent) # this function type definitions end up in the DispatchDevice struct self.dispatchTypeDefinition += "\n\t{1}PFN_{0} {0};".format(name, extIndent) # surface extension version closing curly brace if self.isSurfaceExtension: self.deviceLevelFunctions += "\n\t}" # Finish processing in superclass OutputGenerator.endFeature(self)
# options. The options are set before XML loading as they may affect it.
reg = Registry(gen, options)

# Parse the specified registry XML into an ElementTree object
startTimer(args.time)
tree = etree.parse(args.registry)
endTimer(args.time, '* Time to make ElementTree =')

# Load the XML tree into the registry object
startTimer(args.time)
reg.loadElementTree(tree)
endTimer(args.time, '* Time to parse ElementTree =')

if (args.validate):
    reg.validateGroups()

if (args.dump):
    write('* Dumping registry to regdump.txt', file=sys.stderr)
    reg.dumpReg(filehandle=open('regdump.txt', 'w', encoding='utf-8'))

# Finally, use the output generator to create the requested target
if (args.debug):
    pdb.run('reg.apiGen()')
else:
    startTimer(args.time)
    reg.apiGen()
    endTimer(args.time, '* Time to generate ' + options.filename + ' =')

if not args.quiet:
    write('* Generated', options.filename, file=sys.stderr)
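# Hedged sketch of the same parse-then-generate flow as a tiny standalone
# driver, with the timer helpers and args handling stripped out. It assumes the
# Khronos registry scripts (reg.py, cgenerator.py) are importable and that
# vk.xml is in the working directory; the exact CGeneratorOptions arguments
# accepted differ between script versions, so the ones used here are
# illustrative only.
import xml.etree.ElementTree as etree

from reg import Registry
from cgenerator import CGeneratorOptions, COutputGenerator

options = CGeneratorOptions(filename='vulkan_core.h', apiname='vulkan')
gen = COutputGenerator()

# Options are created before XML loading, as they may affect it.
reg = Registry(gen, options)
reg.loadElementTree(etree.parse('vk.xml'))
reg.apiGen()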
def endFile(self): # write types.d file write(TYPES_HEADER.format(PACKAGE_PREFIX=self.genOpts.packagePrefix, HEADER_VERSION=self.headerVersion) + self.typesFileContent, file=self.typesFile) # write functions.d file write("}}\n\n__gshared {{{GLOBAL_FUNCTION_DEFINITIONS}\n}}\n".format( GLOBAL_FUNCTION_DEFINITIONS=self.functionTypeDefinition), file=self.funcsFile) write("""\ /// if not using version "with-derelict-loader" this function must be called first /// sets vkCreateInstance function pointer and acquires basic functions to retrieve information about the implementation void loadGlobalLevelFunctions( typeof( vkGetInstanceProcAddr ) getProcAddr ) { vkGetInstanceProcAddr = getProcAddr; vkEnumerateInstanceExtensionProperties = cast( typeof( vkEnumerateInstanceExtensionProperties )) vkGetInstanceProcAddr( null, "vkEnumerateInstanceExtensionProperties" ); vkEnumerateInstanceLayerProperties = cast( typeof( vkEnumerateInstanceLayerProperties )) vkGetInstanceProcAddr( null, "vkEnumerateInstanceLayerProperties" ); vkCreateInstance = cast( typeof( vkCreateInstance )) vkGetInstanceProcAddr( null, "vkCreateInstance" ); } /// with a valid VkInstance call this function to retrieve additional VkInstance, VkPhysicalDevice, ... related functions void loadInstanceLevelFunctions( VkInstance instance ) { assert( vkGetInstanceProcAddr !is null, "Must call loadGlobalLevelFunctions before loadInstanceLevelFunctions" );\ """ + self.instanceLevelFunctions + """\n\ } /// with a valid VkInstance call this function to retrieve VkDevice, VkQueue and VkCommandBuffer related functions /// the functions call indirectly through the VkInstance and will be internally dispatched by the implementation /// use loadDeviceLevelFunctions( VkDevice device ) bellow to avoid this indirection and get the pointers directly form a VkDevice void loadDeviceLevelFunctions( VkInstance instance ) { assert( vkGetInstanceProcAddr !is null, "Must call loadInstanceLevelFunctions before loadDeviceLevelFunctions" );\ """ + self.deviceLevelFunctions.format(INSTANCE_OR_DEVICE="Instance", instance_or_device="instance") + """\n\ } /// with a valid VkDevice call this function to retrieve VkDevice, VkQueue and VkCommandBuffer related functions /// the functions call directly VkDevice and related resources and can be retrieved for one and only one VkDevice /// calling this function again with another VkDevices will overwrite the __gshared functions retrieved previously /// use createGroupedDeviceLevelFunctions bellow if usage of multiple VkDevices is required void loadDeviceLevelFunctions( VkDevice device ) { assert( vkGetDeviceProcAddr !is null, "Must call loadInstanceLevelFunctions before loadDeviceLevelFunctions" );\ """ + self.deviceLevelFunctions.format(INSTANCE_OR_DEVICE="Device", instance_or_device="device") + """\n\ } /// with a valid VkDevice call this function to retrieve VkDevice, VkQueue and VkCommandBuffer related functions grouped in a DispatchDevice struct /// the functions call directly VkDevice and related resources and can be retrieved for any VkDevice deprecated( \"Use DispatchDevice( VkDevice ) or DispatchDevice.loadDeviceLevelFunctions( VkDevice ) instead\" ) DispatchDevice createDispatchDeviceLevelFunctions( VkDevice device ) { return DispatchDevice( device ); } // struct to group per device deviceLevelFunctions into a custom namespace // keeps track of the device to which the functions are bound struct DispatchDevice { private VkDevice device = VK_NULL_HANDLE; VkCommandBuffer commandBuffer; // return copy of the internal 
VkDevice VkDevice vkDevice() { return device; } // Constructor forwards parameter 'device' to 'this.loadDeviceLevelFunctions' this( VkDevice device ) { this.loadDeviceLevelFunctions( device ); } // load the device level member functions // this also sets the private member 'device' to the passed in VkDevice // now the DispatchDevice can be used e.g.: // auto dd = DispatchDevice( device ); // dd.vkDestroyDevice( dd.vkDevice, pAllocator ); // convenience functions to omit the first arg do exist, see bellow void loadDeviceLevelFunctions( VkDevice device ) { assert( vkGetDeviceProcAddr !is null, "Must call loadInstanceLevelFunctions before loadDeviceLevelFunctions" ); this.device = device;\ """ + self.deviceLevelFunctions.format(INSTANCE_OR_DEVICE="Device", instance_or_device="device").replace( '\t', '\t\t').replace( '\t\t\t\t', '\t\t\t') + """\n\ } // Convenience member functions, forwarded to corresponding vulkan functions // If the first arg of the vulkan function is VkDevice it can be omitted // private 'DipatchDevice' member 'device' will be passed to the forwarded vulkan functions // the crux is that function pointers can't be overloaded with regular functions // hence the vk prefix is ditched for the convenience variants // e.g.: // auto dd = DispatchDevice( device ); // dd.DestroyDevice( pAllocator ); // instead of: dd.vkDestroyDevice( dd.vkDevice, pAllocator ); // // Same mechanism works with functions which require a VkCommandBuffer as first arg // In this case the public member 'commandBuffer' must be set beforehand // e.g.: // dd.commandBuffer = some_command_buffer; // dd.BeginCommandBuffer( &beginInfo ); // dd.CmdBindPipeline( VK_PIPELINE_BIND_POINT_GRAPHICS, some_pipeline ); // // Does not work with queues, there are just too few queue related functions""" + self.dispatchConvenienceFunctions + """\n\ // Member vulkan function decelerations{DISPATCH_FUNCTION_DEFINITIONS} }} // Derelict loader to acquire entry point vkGetInstanceProcAddr version( {NAME_PREFIX_UCASE}_FROM_DERELICT ) {{ import derelict.util.loader; import derelict.util.system; private {{ version( Windows ) enum libNames = "vulkan-1.dll"; else version( Posix ) enum libNames = "libvulkan.so.1"; else static assert( 0,"Need to implement Vulkan libNames for this operating system." ); }} class Derelict{NAME_PREFIX}Loader : SharedLibLoader {{ this() {{ super( libNames ); }} protected override void loadSymbols() {{ typeof( vkGetInstanceProcAddr ) getProcAddr; bindFunc( cast( void** )&getProcAddr, "vkGetInstanceProcAddr" ); loadGlobalLevelFunctions( getProcAddr ); }} }} __gshared Derelict{NAME_PREFIX}Loader Derelict{NAME_PREFIX}; shared static this() {{ Derelict{NAME_PREFIX} = new Derelict{NAME_PREFIX}Loader(); }} }} """.format(NAME_PREFIX=self.genOpts.namePrefix, NAME_PREFIX_UCASE=self.genOpts.namePrefix.upper(), DISPATCH_FUNCTION_DEFINITIONS=self.dispatchTypeDefinition), file=self.funcsFile) self.typesFile.close() self.funcsFile.close()
def beginFile(self, genOpts):
    OutputGenerator.beginFile(self, genOpts)
    # C-specific
    #
    # Multiple inclusion protection & C++ wrappers.
    # Internal state - accumulators for function pointers and function
    # pointer initialization
    self.pointers = []
    self.pointerInitializersInstance = []
    self.pointerInitializersDevice = []
    #
    # Write header protection
    filename = self.genOpts.directory + '/' + 'vulkan_ext.h'
    self.outFileHeader = open(filename, 'w', encoding='utf-8')

    write('#ifndef VULKAN_EXT_H', file=self.outFileHeader)
    write('#define VULKAN_EXT_H', file=self.outFileHeader)
    write('', file=self.outFileHeader)
    write('#ifdef __cplusplus', file=self.outFileHeader)
    write('extern "C" {', file=self.outFileHeader)
    write('#endif', file=self.outFileHeader)
    #
    # User-supplied prefix text, if any (list of strings)
    if genOpts.prefixText:
        for s in genOpts.prefixText:
            write(s, file=self.outFile)
            write(s, file=self.outFileHeader)
    write(doc, file=self.outFileHeader)

    write('#include <openxr/openxr.h>', file=self.outFile)
    self.newline()
    write('#include <openxr/openxr.h>', file=self.outFileHeader)
    write('', file=self.outFileHeader)
    write('void vkExtInitInstance(VkInstance instance);', file=self.outFileHeader)
    write('void vkExtInitDevice(VkDevice device);', file=self.outFileHeader)
    write('', file=self.outFileHeader)
# Load & parse registry
reg = Registry()

startTimer(args.time)
tree = etree.parse(args.registry)
endTimer(args.time, '* Time to make ElementTree =')

startTimer(args.time)
reg.loadElementTree(tree)
endTimer(args.time, '* Time to parse ElementTree =')

if (args.validate):
    reg.validateGroups()

if (args.dump):
    write('* Dumping registry to regdump.txt', file=sys.stderr)
    reg.dumpReg(filehandle=open('regdump.txt', 'w', encoding='utf-8'))

# create error/warning & diagnostic files
if (args.errfile):
    errWarn = open(args.errfile, 'w', encoding='utf-8')
else:
    errWarn = sys.stderr

if (args.diagfile):
    diag = open(args.diagfile, 'w', encoding='utf-8')
else:
    diag = None

if (args.debug):
    pdb.run('genTarget(args)')
def endFile(self):
    for pointer in self.pointers:
        write(pointer, file=self.outFile)
    self.newline()

    write('void vkExtInitInstance(VkInstance instance)\n{', file=self.outFile)
    for pointerInitializer in self.pointerInitializersInstance:
        write(pointerInitializer, file=self.outFile)
    write('}', file=self.outFile)
    self.newline()

    write('void vkExtInitDevice(VkDevice device)\n{', file=self.outFile)
    for pointerInitializer in self.pointerInitializersDevice:
        write(pointerInitializer, file=self.outFile)
    write('}', file=self.outFile)
    self.newline()

    # Finish header file
    write('#ifdef __cplusplus', file=self.outFileHeader)
    write('}', file=self.outFileHeader)
    write('#endif', file=self.outFileHeader)
    write('', file=self.outFileHeader)
    write('#endif', file=self.outFileHeader)
    self.outFileHeader.close()

    # Finish processing in superclass
    OutputGenerator.endFile(self)
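# Hedged, self-contained sketch of the accumulate-then-flush pattern used by the
# beginFile()/endFile() pair above: pointer declarations and their initializers
# are collected into lists while commands are processed and only written out
# when the file is finalized. The ExtLoaderWriter class and its sample command
# are illustrative only, not part of the real generator scripts.
import sys

class ExtLoaderWriter:
    def __init__(self, out):
        self.out = out
        self.pointers = []
        self.pointerInitializersInstance = []

    def add_command(self, name):
        # Record one extension command for later emission.
        self.pointers.append('PFN_{0} {0};'.format(name))
        self.pointerInitializersInstance.append(
            '    {0} = (PFN_{0})vkGetInstanceProcAddr(instance, "{0}");'.format(name))

    def finish(self):
        # Flush everything collected so far as a C source fragment.
        for decl in self.pointers:
            print(decl, file=self.out)
        print('void vkExtInitInstance(VkInstance instance)\n{', file=self.out)
        for init in self.pointerInitializersInstance:
            print(init, file=self.out)
        print('}', file=self.out)

writer = ExtLoaderWriter(sys.stdout)
writer.add_command('vkCreateSwapchainKHR')
writer.finish()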
def writeInclude(self, directory, basename, contents):
    """Generate an include file.

    - directory - subdirectory to put file in
    - basename - base name of the file
    - contents - contents of the file (Asciidoc boilerplate aside)"""
    # Create subdirectory, if needed
    directory = self.genOpts.directory + '/' + directory
    self.makeDir(directory)

    # Create file
    filename = directory + '/' + basename + '.txt'
    self.logMsg('diag', '# Generating include file:', filename)
    fp = open(filename, 'w', encoding='utf-8')

    # Asciidoc anchor
    write(self.genOpts.conventions.warning_comment, file=fp)
    write('[[{0}]]'.format(basename), file=fp)

    if self.genOpts.conventions.generate_index_terms:
        if basename.startswith(self.conventions.command_prefix):
            index_term = basename + " (function)"
        elif basename.startswith(self.conventions.type_prefix):
            index_term = basename + " (type)"
        elif basename.startswith(self.conventions.api_prefix):
            index_term = basename + " (define)"
        else:
            index_term = basename
        write('indexterm:[{}]'.format(index_term), file=fp)

    write('[source,c++]', file=fp)
    write('----', file=fp)
    write(contents, file=fp)
    write('----', file=fp)
    fp.close()

    if self.genOpts.secondaryInclude:
        # Create secondary no cross-reference include file
        filename = directory + '/' + basename + '.no-xref.txt'
        self.logMsg('diag', '# Generating include file:', filename)
        fp = open(filename, 'w', encoding='utf-8')

        # Asciidoc anchor
        write(self.genOpts.conventions.warning_comment, file=fp)
        write('// Include this no-xref version without cross reference id for multiple includes of same file', file=fp)
        write('[source,c++]', file=fp)
        write('----', file=fp)
        write(contents, file=fp)
        write('----', file=fp)
        fp.close()
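# Hedged sketch of the include-file layout that writeInclude() above produces,
# reduced to a standalone helper so the emitted Asciidoc structure is easy to
# see. The warning comment, file name, and sample declaration are illustrative
# placeholders, not the exact strings used by the real conventions object.
def write_asciidoc_include(path, anchor, contents,
                           warning='// WARNING: This file is generated - do not edit.'):
    with open(path, 'w', encoding='utf-8') as fp:
        print(warning, file=fp)
        print('[[{0}]]'.format(anchor), file=fp)
        print('[source,c++]', file=fp)
        print('----', file=fp)
        print(contents, file=fp)
        print('----', file=fp)

write_asciidoc_include('vkDestroyThing.txt', 'vkDestroyThing',
                       'VKAPI_ATTR void VKAPI_CALL vkDestroyThing(VkDevice device, VkThing thing);')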
def endFile(self): # gather the types that are needed to generate types_to_gen = set() for s in enums_to_gen: types_to_gen.add(s) for f in flags_to_gen: types_to_gen.add(f) types_to_gen = types_to_gen.union( GatherTypesToGen(self.all_structures, structures_to_gen)) for key in EXTENSION_CATEGORIES.keys(): types_to_gen = types_to_gen.union( GatherTypesToGen(self.all_structures, self.extension_sets[key])) names_of_structures_to_gen = set() for s in self.all_structures: if s.name in types_to_gen: names_of_structures_to_gen.add(s.name) structs_to_comp = set() for s in struct_comparisons_to_gen: structs_to_comp.add(s) structs_to_comp = structs_to_comp.union( GatherTypesToGen(self.all_structures, struct_comparisons_to_gen)) for key, value in self.extension_sets.items(): self.extension_sets[key] = sorted(value) alias_versions = {} for version in self.vulkan_versions: for aliased_type, aliases in self.aliases.items(): for alias in aliases: if alias in version.names: alias_versions[alias] = version.minorVersion # print the types gathered out = '' out += license_header + "\n" out += "#include \"vulkaninfo.h\"\n" out += "#include \"outputprinter.h\"\n" out += custom_formaters for enum in (e for e in self.enums if e.name in types_to_gen): out += PrintEnumToString(enum, self) out += PrintEnum(enum, self) for flag in self.flags: if flag.name in types_to_gen: for bitmask in (b for b in self.bitmasks if b.name == flag.enum): out += PrintBitMask(bitmask, flag.name, self) if flag.name in flags_strings_to_gen: for bitmask in (b for b in self.bitmasks if b.name == flag.enum): out += PrintBitMaskToString(bitmask, flag.name, self) for s in ( x for x in self.all_structures if x.name in types_to_gen and x.name not in struct_blacklist): out += PrintStructure(s, types_to_gen, names_of_structures_to_gen) out += "pNextChainInfos get_chain_infos() {\n" out += " pNextChainInfos infos;\n" for key in EXTENSION_CATEGORIES.keys(): out += PrintChainBuilders(key, self.extension_sets[key], self.all_structures) out += " return infos;\n}\n" for key, value in EXTENSION_CATEGORIES.items(): out += PrintChainIterator(key, self.extension_sets[key], self.all_structures, value.get('type'), self.extTypes, self.aliases, self.vulkan_versions) for s in (x for x in self.all_structures if x.name in structs_to_comp): out += PrintStructComparison(s) for s in (x for x in self.all_structures if x.name in struct_short_versions_to_gen): out += PrintStructShort(s) gen.write(out, file=self.outFile) gen.OutputGenerator.endFile(self)
def dumpReg(self, maxlen=40, filehandle=sys.stdout):
    """Dump all the dictionaries constructed from the Registry object"""
    write("***************************************", file=filehandle)
    write("    ** Dumping Registry contents **", file=filehandle)
    write("***************************************", file=filehandle)

    write("// Types", file=filehandle)
    for name in self.typedict:
        tobj = self.typedict[name]
        write("    Type", name, "->", etree.tostring(tobj.elem)[0:maxlen], file=filehandle)

    write("// Groups", file=filehandle)
    for name in self.groupdict:
        gobj = self.groupdict[name]
        write("    Group", name, "->", etree.tostring(gobj.elem)[0:maxlen], file=filehandle)

    write("// Enums", file=filehandle)
    for name in self.enumdict:
        eobj = self.enumdict[name]
        write("    Enum", name, "->", etree.tostring(eobj.elem)[0:maxlen], file=filehandle)

    write("// Commands", file=filehandle)
    for name in self.cmddict:
        cobj = self.cmddict[name]
        write("    Command", name, "->", etree.tostring(cobj.elem)[0:maxlen], file=filehandle)

    write("// APIs", file=filehandle)
    for key in self.apidict:
        write("    API Version ", key, "->",
              etree.tostring(self.apidict[key].elem)[0:maxlen], file=filehandle)

    write("// Extensions", file=filehandle)
    for key in self.extdict:
        write("    Extension", key, "->",
              etree.tostring(self.extdict[key].elem)[0:maxlen], file=filehandle)
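# Hedged usage sketch for dumpReg() above: dumping a parsed registry to a text
# file, as the -dump handling elsewhere in these scripts does. Assumes the
# Khronos reg module and a local vk.xml; the output file name is illustrative.
import xml.etree.ElementTree as etree
from reg import Registry

reg = Registry()
reg.loadElementTree(etree.parse('vk.xml'))
with open('regdump.txt', 'w', encoding='utf-8') as fh:
    reg.dumpReg(maxlen=80, filehandle=fh)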
def resolveDeprecationChain(self, extensionsList, succeededBy, isRefpage, file):
    ext = next(x for x in extensionsList if x.name == succeededBy)

    if ext.deprecationType:
        if ext.deprecationType == 'promotion':
            if ext.supercedingAPIVersion:
                write(' ** Which in turn was _promoted_ to\n' +
                      ext.conditionalLinkCoreAPI(ext.supercedingAPIVersion, '-promotions', isRefpage),
                      file=file)
            else:  # ext.supercedingExtension
                write(' ** Which in turn was _promoted_ to extension\n' +
                      ext.conditionalLinkExt(ext.supercedingExtension), file=file)
                # pass isRefpage through the recursion as well
                ext.resolveDeprecationChain(extensionsList, ext.supercedingExtension, isRefpage, file)
        elif ext.deprecationType == 'deprecation':
            if ext.supercedingAPIVersion:
                write(' ** Which in turn was _deprecated_ by\n' +
                      ext.conditionalLinkCoreAPI(ext.supercedingAPIVersion, '-new-feature', isRefpage),
                      file=file)
            elif ext.supercedingExtension:
                write(' ** Which in turn was _deprecated_ by\n' +
                      ext.conditionalLinkExt(ext.supercedingExtension) + ' extension', file=file)
                ext.resolveDeprecationChain(extensionsList, ext.supercedingExtension, isRefpage, file)
            else:
                write(' ** Which in turn was _deprecated_ without replacement', file=file)
        elif ext.deprecationType == 'obsoletion':
            if ext.supercedingAPIVersion:
                write(' ** Which in turn was _obsoleted_ by\n' +
                      ext.conditionalLinkCoreAPI(ext.supercedingAPIVersion, '-new-feature', isRefpage),
                      file=file)
            elif ext.supercedingExtension:
                write(' ** Which in turn was _obsoleted_ by\n' +
                      ext.conditionalLinkExt(ext.supercedingExtension) + ' extension', file=file)
                ext.resolveDeprecationChain(extensionsList, ext.supercedingExtension, isRefpage, file)
            else:
                write(' ** Which in turn was _obsoleted_ without replacement', file=file)
        else:  # should be unreachable
            self.generator.logMsg(
                'error',
                'Logic error in resolveDeprecationChain(): deprecationType is neither \'promotion\', \'deprecation\' nor \'obsoletion\'!')
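# Hedged sketch of the chain walk that resolveDeprecationChain() above performs,
# reduced to plain data: follow an extension's successor links until a core
# version or an extension without a successor is reached. The successor table
# below is a small hand-made example, not derived from the registry.
successors = {
    'VK_KHR_get_physical_device_properties2': 'VK_VERSION_1_1',
    'VK_EXT_debug_report': 'VK_EXT_debug_utils',
    'VK_EXT_debug_utils': None,
}

def resolve_chain(name):
    chain = [name]
    while successors.get(chain[-1]):
        chain.append(successors[chain[-1]])
    return chain

print(resolve_chain('VK_EXT_debug_report'))  # ['VK_EXT_debug_report', 'VK_EXT_debug_utils']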
def endFile(self): self.extensions.sort() for ext in self.extensions: ext.makeMetafile(self.extensions) promotedExtensions = {} for ext in self.extensions: if ext.deprecationType == 'promotion' and ext.supercedingAPIVersion: promotedExtensions.setdefault(ext.supercedingAPIVersion, []).append(ext) for coreVersion, extensions in promotedExtensions.items(): promoted_extensions_fp = self.newFile(self.directory + '/promoted_extensions_' + coreVersion + self.file_suffix) for ext in extensions: indent = '' write(' * {blank}\n+\n' + ext.conditionalLinkExt(ext.name, indent), file=promoted_extensions_fp) promoted_extensions_fp.close() # Re-sort to match earlier behavior # TODO: Remove this extra sort when re-arranging section order OK. def makeSortKey(ext): name = ext.name.lower() prefixes = self.conventions.extension_index_prefixes for i, prefix in enumerate(prefixes): if ext.name.startswith(prefix): return (i, name) return (len(prefixes), name) self.extensions.sort(key=makeSortKey) with self.newFile(self.directory + '/current_extensions_appendix' + self.file_suffix) as current_extensions_appendix_fp, \ self.newFile(self.directory + '/deprecated_extensions_appendix' + self.file_suffix) as deprecated_extensions_appendix_fp, \ self.newFile(self.directory + '/current_extension_appendices' + self.file_suffix) as current_extension_appendices_fp, \ self.newFile(self.directory + '/current_extension_appendices_toc' + self.file_suffix) as current_extension_appendices_toc_fp, \ self.newFile(self.directory + '/deprecated_extension_appendices' + self.file_suffix) as deprecated_extension_appendices_fp, \ self.newFile(self.directory + '/deprecated_extension_appendices_toc' + self.file_suffix) as deprecated_extension_appendices_toc_fp, \ self.newFile(self.directory + '/deprecated_extensions_guard_macro' + self.file_suffix) as deprecated_extensions_guard_macro_fp, \ self.newFile(self.directory + '/provisional_extensions_appendix' + self.file_suffix) as provisional_extensions_appendix_fp, \ self.newFile(self.directory + '/provisional_extension_appendices' + self.file_suffix) as provisional_extension_appendices_fp, \ self.newFile(self.directory + '/provisional_extension_appendices_toc' + self.file_suffix) as provisional_extension_appendices_toc_fp, \ self.newFile(self.directory + '/provisional_extensions_guard_macro' + self.file_suffix) as provisional_extensions_guard_macro_fp: write('include::deprecated_extensions_guard_macro' + self.file_suffix + '[]', file=current_extensions_appendix_fp) write('', file=current_extensions_appendix_fp) write('ifndef::HAS_DEPRECATED_EXTENSIONS[]', file=current_extensions_appendix_fp) write('[[extension-appendices-list]]', file=current_extensions_appendix_fp) write('== List of Extensions', file=current_extensions_appendix_fp) write('endif::HAS_DEPRECATED_EXTENSIONS[]', file=current_extensions_appendix_fp) write('ifdef::HAS_DEPRECATED_EXTENSIONS[]', file=current_extensions_appendix_fp) write('[[extension-appendices-list]]', file=current_extensions_appendix_fp) write('== List of Current Extensions', file=current_extensions_appendix_fp) write('endif::HAS_DEPRECATED_EXTENSIONS[]', file=current_extensions_appendix_fp) write('', file=current_extensions_appendix_fp) write('include::current_extension_appendices_toc' + self.file_suffix + '[]', file=current_extensions_appendix_fp) write('<<<', file=current_extensions_appendix_fp) write('include::current_extension_appendices' + self.file_suffix + '[]', file=current_extensions_appendix_fp) write('include::deprecated_extensions_guard_macro' + 
self.file_suffix + '[]', file=deprecated_extensions_appendix_fp) write('', file=deprecated_extensions_appendix_fp) write('ifdef::HAS_DEPRECATED_EXTENSIONS[]', file=deprecated_extensions_appendix_fp) write('[[deprecated-extension-appendices-list]]', file=deprecated_extensions_appendix_fp) write('== List of Deprecated Extensions', file=deprecated_extensions_appendix_fp) write('include::deprecated_extension_appendices_toc' + self.file_suffix + '[]', file=deprecated_extensions_appendix_fp) write('<<<', file=deprecated_extensions_appendix_fp) write('include::deprecated_extension_appendices' + self.file_suffix + '[]', file=deprecated_extensions_appendix_fp) write('endif::HAS_DEPRECATED_EXTENSIONS[]', file=deprecated_extensions_appendix_fp) # add include guard to allow multiple includes write('ifndef::DEPRECATED_EXTENSIONS_GUARD_MACRO_INCLUDE_GUARD[]', file=deprecated_extensions_guard_macro_fp) write(':DEPRECATED_EXTENSIONS_GUARD_MACRO_INCLUDE_GUARD:\n', file=deprecated_extensions_guard_macro_fp) write('ifndef::PROVISIONAL_EXTENSIONS_GUARD_MACRO_INCLUDE_GUARD[]', file=provisional_extensions_guard_macro_fp) write(':PROVISIONAL_EXTENSIONS_GUARD_MACRO_INCLUDE_GUARD:\n', file=provisional_extensions_guard_macro_fp) write('include::provisional_extensions_guard_macro' + self.file_suffix + '[]', file=provisional_extensions_appendix_fp) write('', file=provisional_extensions_appendix_fp) write('ifdef::HAS_PROVISIONAL_EXTENSIONS[]', file=provisional_extensions_appendix_fp) write('[[provisional-extension-appendices-list]]', file=provisional_extensions_appendix_fp) write('== List of Provisional Extensions', file=provisional_extensions_appendix_fp) write('include::provisional_extension_appendices_toc' + self.file_suffix + '[]', file=provisional_extensions_appendix_fp) write('<<<', file=provisional_extensions_appendix_fp) write('include::provisional_extension_appendices' + self.file_suffix + '[]', file=provisional_extensions_appendix_fp) write('endif::HAS_PROVISIONAL_EXTENSIONS[]', file=provisional_extensions_appendix_fp) for ext in self.extensions: include = self.makeExtensionInclude(ext) link = ' * <<' + ext.name + '>>' if ext.provisional == 'true': write(self.conditionalExt(ext.name, include), file=provisional_extension_appendices_fp) write(self.conditionalExt(ext.name, link), file=provisional_extension_appendices_toc_fp) write(self.conditionalExt(ext.name, ':HAS_PROVISIONAL_EXTENSIONS:'), file=provisional_extensions_guard_macro_fp) elif ext.deprecationType is None: write(self.conditionalExt(ext.name, include), file=current_extension_appendices_fp) write(self.conditionalExt(ext.name, link), file=current_extension_appendices_toc_fp) else: condition = ext.supercedingAPIVersion if ext.supercedingAPIVersion else ext.supercedingExtension # potentially None too write(self.conditionalExt(ext.name, include, 'ifndef', condition), file=current_extension_appendices_fp) write(self.conditionalExt(ext.name, link, 'ifndef', condition), file=current_extension_appendices_toc_fp) write(self.conditionalExt(ext.name, include, 'ifdef', condition), file=deprecated_extension_appendices_fp) write(self.conditionalExt(ext.name, link, 'ifdef', condition), file=deprecated_extension_appendices_toc_fp) write(self.conditionalExt(ext.name, ':HAS_DEPRECATED_EXTENSIONS:', 'ifdef', condition), file=deprecated_extensions_guard_macro_fp) write('endif::DEPRECATED_EXTENSIONS_GUARD_MACRO_INCLUDE_GUARD[]', file=deprecated_extensions_guard_macro_fp) OutputGenerator.endFile(self)
def dumpReg(self, maxlen=40, filehandle=sys.stdout): """Dump all the dictionaries constructed from the Registry object""" write('***************************************', file=filehandle) write(' ** Dumping Registry contents **', file=filehandle) write('***************************************', file=filehandle) write('// Types', file=filehandle) for name in self.typedict: tobj = self.typedict[name] write(' Type', name, '->', etree.tostring(tobj.elem)[0:maxlen], file=filehandle) write('// Groups', file=filehandle) for name in self.groupdict: gobj = self.groupdict[name] write(' Group', name, '->', etree.tostring(gobj.elem)[0:maxlen], file=filehandle) write('// Enums', file=filehandle) for name in self.enumdict: eobj = self.enumdict[name] write(' Enum', name, '->', etree.tostring(eobj.elem)[0:maxlen], file=filehandle) write('// Commands', file=filehandle) for name in self.cmddict: cobj = self.cmddict[name] write(' Command', name, '->', etree.tostring(cobj.elem)[0:maxlen], file=filehandle) write('// APIs', file=filehandle) for key in self.apidict: write(' API Version ', key, '->', etree.tostring(self.apidict[key].elem)[0:maxlen], file=filehandle) write('// Extensions', file=filehandle) for key in self.extdict: write(' Extension', key, '->', etree.tostring(self.extdict[key].elem)[0:maxlen], file=filehandle)
# Load & parse registry reg = Registry() startTimer(args.time) tree = etree.parse(args.registry) endTimer(args.time, '* Time to make ElementTree =') startTimer(args.time) reg.loadElementTree(tree) endTimer(args.time, '* Time to parse ElementTree =') if (args.validate): reg.validateGroups() if (args.dump): write('* Dumping registry to regdump.txt', file=sys.stderr) reg.dumpReg(filehandle = open('regdump.txt','w', encoding='utf-8')) # create error/warning & diagnostic files if (args.errfile): errWarn = open(args.errfile, 'w', encoding='utf-8') else: errWarn = sys.stderr if (args.diagfile): diag = open(args.diagfile, 'w', encoding='utf-8') else: diag = None if (args.debug): pdb.run('genTarget(args)')
protect = True
target = None
timeit = False
validate = False

# Default input / log files
errFilename = None
diagFilename = 'diag.txt'
regFilename = 'vk.xml'

if __name__ == '__main__':
    i = 1
    while (i < len(sys.argv)):
        arg = sys.argv[i]
        i = i + 1
        if (arg == '-debug'):
            write('Enabling debug (-debug)', file=sys.stderr)
            debug = True
        elif (arg == '-dump'):
            write('Enabling dump (-dump)', file=sys.stderr)
            dump = True
        elif (arg == '-noprotect'):
            write('Disabling inclusion protection in output headers', file=sys.stderr)
            protect = False
        elif (arg == '-profile'):
            write('Enabling profiling (-profile)', file=sys.stderr)
            profile = True
        elif (arg == '-registry'):
            regFilename = sys.argv[i]
            i = i + 1
            write('Using registry ', regFilename, file=sys.stderr)
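# Hedged sketch of the same options expressed with argparse, matching the
# args.* attributes (registry, dump, debug, protect, profile) that the driver
# snippets above expect. Flag spellings mirror the manual loop; defaults and
# help strings are assumptions, not taken verbatim from the real script.
import argparse

parser = argparse.ArgumentParser(prog='genvk')
parser.add_argument('-debug', action='store_true', help='Enable debugging')
parser.add_argument('-dump', action='store_true', help='Dump registry to regdump.txt')
parser.add_argument('-noprotect', dest='protect', action='store_false',
                    help='Disable inclusion protection in output headers')
parser.add_argument('-profile', action='store_true', help='Enable profiling')
parser.add_argument('-registry', default='vk.xml', help='Registry XML file to use')

args = parser.parse_args(['-registry', 'vk.xml', '-dump'])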
def endFile(self): # write types.d file write(TYPES_HEADER.format(PACKAGE_PREFIX=self.genOpts.packagePrefix, HEADER_VERSION=self.headerVersion) + self.typesFileContent, file=self.typesFile) # write functions.d file write("}}\n\n__gshared {{{GLOBAL_FUNCTION_DEFINITIONS}\n}}\n".format( GLOBAL_FUNCTION_DEFINITIONS=self.functionTypeDefinition), file=self.funcsFile) write("""\ /// if not using version "with-derelict-loader" this function must be called first /// sets vkCreateInstance function pointer and acquires basic functions to retrieve information about the implementation void loadGlobalLevelFunctions(typeof(vkGetInstanceProcAddr) getProcAddr) { vkGetInstanceProcAddr = getProcAddr; vkEnumerateInstanceExtensionProperties = cast(typeof(vkEnumerateInstanceExtensionProperties)) vkGetInstanceProcAddr(null, "vkEnumerateInstanceExtensionProperties"); vkEnumerateInstanceLayerProperties = cast(typeof(vkEnumerateInstanceLayerProperties)) vkGetInstanceProcAddr(null, "vkEnumerateInstanceLayerProperties"); vkCreateInstance = cast(typeof(vkCreateInstance)) vkGetInstanceProcAddr(null, "vkCreateInstance"); } /// with a valid VkInstance call this function to retrieve additional VkInstance, VkPhysicalDevice, ... related functions void loadInstanceLevelFunctions(VkInstance instance) { assert(vkGetInstanceProcAddr !is null, "Must call loadGlobalLevelFunctions before loadInstanceLevelFunctions");\ """ + self.instanceLevelFunctions + """\ } /// with a valid VkInstance call this function to retrieve VkDevice, VkQueue and VkCommandBuffer related functions /// the functions call indirectly through the VkInstance and will be internally dispatched by the implementation /// use loadDeviceLevelFunctions(VkDevice device) bellow to avoid this indirection and get the pointers directly form a VkDevice void loadDeviceLevelFunctions(VkInstance instance) { assert(vkGetInstanceProcAddr !is null, "Must call loadInstanceLevelFunctions before loadDeviceLevelFunctions");\ """ + self.deviceLevelFunctions.format(INSTANCE_OR_DEVICE="Instance", instance_or_device="instance") + """\ } /// with a valid VkDevice call this function to retrieve VkDevice, VkQueue and VkCommandBuffer related functions /// the functions call directly VkDevice and related resources and can be retrieved for one and only one VkDevice /// calling this function again with another VkDevices will overwrite the __gshared functions retrieved previously /// use createGroupedDeviceLevelFunctions bellow if usage of multiple VkDevices is required void loadDeviceLevelFunctions(VkDevice device) { assert(vkGetDeviceProcAddr !is null, "Must call loadInstanceLevelFunctions before loadDeviceLevelFunctions");\ """ + self.deviceLevelFunctions.format(INSTANCE_OR_DEVICE="Device", instance_or_device="device") + """\ } /// with a valid VkDevice call this function to retrieve VkDevice, VkQueue and VkCommandBuffer related functions grouped in a DispatchDevice struct /// the functions call directly VkDevice and related resources and can be retrieved for any VkDevice DispatchDevice createDispatchDeviceLevelFunctions(VkDevice device) { assert(vkGetDeviceProcAddr !is null, "Must call loadInstanceLevelFunctions before loadDeviceLevelFunctions"); DispatchDevice dispatchDevice; with(dispatchDevice) {\ """ + self.deviceLevelFunctions.format(INSTANCE_OR_DEVICE="Device", instance_or_device="device").replace( '\t\t', '\t\t\t') + """\ }} return dispatchDevice; }} // struct to group per device deviceLevelFunctions into a custom namespace private struct DispatchDevice 
{{{DISPATCH_FUNCTION_DEFINITIONS} }} // Derelict loader to acquire entry point vkGetInstanceProcAddr version({NAME_PREFIX_UCASE}_FROM_DERELICT) {{ import derelict.util.loader; import derelict.util.system; private {{ version(Windows) enum libNames = "vulkan-1.dll"; else version(Posix) enum libNames = "libvulkan.so.1"; else static assert(0,"Need to implement Vulkan libNames for this operating system."); }} class Derelict{NAME_PREFIX}Loader : SharedLibLoader {{ this() {{ super(libNames); }} protected override void loadSymbols() {{ typeof(vkGetInstanceProcAddr) getProcAddr; bindFunc(cast(void**)&getProcAddr, "vkGetInstanceProcAddr"); loadGlobalLevelFunctions(getProcAddr); }} }} __gshared Derelict{NAME_PREFIX}Loader Derelict{NAME_PREFIX}; shared static this() {{ Derelict{NAME_PREFIX} = new Derelict{NAME_PREFIX}Loader(); }} }} """.format(NAME_PREFIX=self.genOpts.namePrefix, NAME_PREFIX_UCASE=self.genOpts.namePrefix.upper(), DISPATCH_FUNCTION_DEFINITIONS=self.dispatchTypeDefinition), file=self.funcsFile) self.typesFile.close() self.funcsFile.close()
def endFeature(self): if self.emit: # first write all types into types.d # special treat for platform surface extension which get wraped into a version block extIndent = self.surfaceExtensionVersionIndent self.typesFileContent += "\n{0}\n".format(self.currentFeature) surfaceVersion = "" if self.isSurfaceExtension: surfaceVersion = "version({0}) {{".format( self.surfaceExtensions[self.currentFeature][0]) self.typesFileContent += "{0}\n\t{1}\n".format( surfaceVersion, self.surfaceExtensions[self.currentFeature][1]) isFirstSectionInFeature = True # for output file formating for section in self.TYPE_SECTIONS: # write contents of type section contents = self.sections[section] if contents: # check if opaque structs were registered and write tem into types file if section == 'struct': if self.opaqueStruct: for opaque in self.opaqueStruct: self.typesFileContent += "{1}struct {0};\n".format( opaque, extIndent) self.typesFileContent += '\n' elif not isFirstSectionInFeature: self.typesFileContent += '\n' # for output file formating isFirstSectionInFeature = False # write the rest of the contents, eg. enums, structs, etc. into types file for content in self.sections[section]: self.typesFileContent += "{1}{0}\n".format( content, extIndent) if self.isSurfaceExtension: self.typesFileContent += "}\n" # currently the commandPointer token is not used if self.genOpts.genFuncPointers and self.sections['commandPointer']: if self.isSurfaceExtension: write(surfaceVersion, file=self.funcsFile) write(extIndent + ('\n' + extIndent).join(self.sections['commandPointer']), file=self.funcsFile) if self.isSurfaceExtension: write("}", file=self.funcsFile) write('', file=self.funcsFile) # write function aliases into functions.d and build strings for later injection if self.sections['command']: # update indention of currentFeature for functions.d content self.currentFeature = "\t" + self.currentFeature # write the aliases to function types write("\n{0}".format(self.currentFeature), file=self.funcsFile) if self.isSurfaceExtension: write("\t" + surfaceVersion, file=self.funcsFile) write(extIndent + ('\n' + extIndent).join(self.sections['command']), file=self.funcsFile) if self.isSurfaceExtension: write("\t}", file=self.funcsFile) # capture if function is a instance or device level function inInstanceLevelFuncNames = False inDeviceLevelFuncNames = False # comment the current feature self.functionTypeDefinition += "\n\n{0}".format( self.currentFeature) # surface extension version directive if self.isSurfaceExtension: self.functionTypeDefinition += "\n\t" + surfaceVersion # create string of functionTypes functionVars for command in self.sections['command']: name = self.functionTypeName[command] self.functionTypeDefinition += "\n\t{1}PFN_{0} {0};".format( name, extIndent) # query if the current function is in instance or deviceLevelFuncNames for the next step if not inInstanceLevelFuncNames and name in self.instanceLevelFuncNames: inInstanceLevelFuncNames = True if not inDeviceLevelFuncNames and name in self.deviceLevelFuncNames: inDeviceLevelFuncNames = True # surface extension version closing curly brace if self.isSurfaceExtension: self.functionTypeDefinition += "\n\t}" # create a strings to load instance level functions if inInstanceLevelFuncNames: # comment the current feature self.instanceLevelFunctions += "\n\n{0}".format( self.currentFeature) # surface extension version directive if self.isSurfaceExtension: self.instanceLevelFunctions += "\n\t" + surfaceVersion # set of global level function names, function pointers are 
ignored here are set in endFile method gloablLevelFuncNames = { "vkGetInstanceProcAddr", "vkEnumerateInstanceExtensionProperties", "vkEnumerateInstanceLayerProperties", "vkCreateInstance" } # build the commands for command in self.sections['command']: name = self.functionTypeName[command] if name in self.instanceLevelFuncNames and name not in gloablLevelFuncNames: self.instanceLevelFunctions += "\n\t{1}{0} = cast(typeof({0})) vkGetInstanceProcAddr(instance, \"{0}\");".format( name, extIndent) # surface extension version closing curly brace if self.isSurfaceExtension: self.instanceLevelFunctions += "\n\t}" # create a strings to load device level functions if inDeviceLevelFuncNames: # comment the current feature self.deviceLevelFunctions += "\n\n{0}".format( self.currentFeature) # surface extension version directive if self.isSurfaceExtension: self.deviceLevelFunctions += "\n\t" + surfaceVersion # build the commands for command in self.sections['command']: name = self.functionTypeName[command] if name in self.deviceLevelFuncNames: self.deviceLevelFunctions += "\n\t{1}{0} = cast(typeof({0})) vkGet{{INSTANCE_OR_DEVICE}}ProcAddr({{instance_or_device}}, \"{0}\");".format( name, extIndent) # this function type definitions end up in the DispatchDevice struct self.dispatchTypeDefinition += "\n\t{1}PFN_{0} {0};".format( name, extIndent) # surface extension version closing curly brace if self.isSurfaceExtension: self.deviceLevelFunctions += "\n\t}" # Finish processing in superclass OutputGenerator.endFeature(self)
def dumpReg(self, maxlen = 40, filehandle = sys.stdout): """Dump all the dictionaries constructed from the Registry object""" write('***************************************', file=filehandle) write(' ** Dumping Registry contents **', file=filehandle) write('***************************************', file=filehandle) write('// Types', file=filehandle) for name in self.typedict: tobj = self.typedict[name] write(' Type', name, '->', etree.tostring(tobj.elem)[0:maxlen], file=filehandle) write('// Groups', file=filehandle) for name in self.groupdict: gobj = self.groupdict[name] write(' Group', name, '->', etree.tostring(gobj.elem)[0:maxlen], file=filehandle) write('// Enums', file=filehandle) for name in self.enumdict: eobj = self.enumdict[name] write(' Enum', name, '->', etree.tostring(eobj.elem)[0:maxlen], file=filehandle) write('// Commands', file=filehandle) for name in self.cmddict: cobj = self.cmddict[name] write(' Command', name, '->', etree.tostring(cobj.elem)[0:maxlen], file=filehandle) write('// APIs', file=filehandle) for key in self.apidict: write(' API Version ', key, '->', etree.tostring(self.apidict[key].elem)[0:maxlen], file=filehandle) write('// Extensions', file=filehandle) for key in self.extdict: write(' Extension', key, '->', etree.tostring(self.extdict[key].elem)[0:maxlen], file=filehandle)
def endFile(self): write("}}\n\n__gshared {{{0}\n}}\n".format(self.functionVars), file=self.funcsFile) write("""\ struct {NAME_PREFIX}Loader {{ @disable this(); @disable this(this); /// if not using version "with-derelict-loader" this function must be called first /// sets vkCreateInstance function pointer and acquires basic functions to retrieve information about the implementation static void loadGlobalLevelFunctions(typeof(vkGetInstanceProcAddr) getProcAddr) {{ vkGetInstanceProcAddr = getProcAddr; vkEnumerateInstanceExtensionProperties = cast(typeof(vkEnumerateInstanceExtensionProperties)) vkGetInstanceProcAddr(null, "vkEnumerateInstanceExtensionProperties"); vkEnumerateInstanceLayerProperties = cast(typeof(vkEnumerateInstanceLayerProperties)) vkGetInstanceProcAddr(null, "vkEnumerateInstanceLayerProperties"); vkCreateInstance = cast(typeof(vkCreateInstance)) vkGetInstanceProcAddr(null, "vkCreateInstance"); }} /// with a valid VkInstance call this function to retrieve additional VkInstance, VkPhysicalDevice, ... related functions static void loadInstanceLevelFunctions(VkInstance instance) {{ assert(vkGetInstanceProcAddr !is null, "Must call {NAME_PREFIX}Loader.loadGlobalLevelFunctions before {NAME_PREFIX}Loader.loadInstanceLevelFunctions");\ """.format(NAME_PREFIX = self.genOpts.namePrefix) + self.instanceLevelFunctions, file=self.funcsFile) write("""\ }} /// with a valid VkInstance call this function to retrieve VkDevice, VkQueue and VkCommandBuffer related functions /// the functions call indirectly through the VkInstance and will be internally dispatched by the implementation static void loadDeviceLevelFunctions(VkInstance instance) {{ assert(vkGetInstanceProcAddr !is null, "Must call {NAME_PREFIX}Loader.loadInstanceLevelFunctions before {NAME_PREFIX}Loader.loadDeviceLevelFunctions");\ """.format(NAME_PREFIX = self.genOpts.namePrefix) + self.deviceLevelFunctions.format(INSTANCE_OR_DEVICE = "Instance", instance_or_device = "instance"), file=self.funcsFile) write("""\ }} /// with a valid VkDevice call this function to retrieve VkDevice, VkQueue and VkCommandBuffer related functions /// the functions call directly VkDevice and related resources and must be retrieved once per logical VkDevice static void loadDeviceLevelFunctions(VkDevice device) {{ assert(vkGetDeviceProcAddr !is null, "Must call {NAME_PREFIX}Loader.loadInstanceLevelFunctions before {NAME_PREFIX}Loader.loadDeviceLevelFunctions");\ """.format(NAME_PREFIX = self.genOpts.namePrefix) + self.deviceLevelFunctions.format(INSTANCE_OR_DEVICE = "Device", instance_or_device = "device"), file=self.funcsFile) write("""\ }} }} version({NAME_PREFIX}LoadFromDerelict) {{ import derelict.util.loader; import derelict.util.system; private {{ version(Windows) enum libNames = "vulkan-1.dll"; else static assert(0,"Need to implement Vulkan libNames for this operating system."); }} class {NAME_PREFIX}DerelictLoader : SharedLibLoader {{ this() {{ super(libNames); }} protected override void loadSymbols() {{ typeof(vkGetInstanceProcAddr) getProcAddr; bindFunc(cast(void**)&getProcAddr, "vkGetInstanceProcAddr"); {NAME_PREFIX}Loader.loadGlobalLevelFunctions(getProcAddr); }} }} __gshared {NAME_PREFIX}DerelictLoader {NAME_PREFIX}Derelict; shared static this() {{ {NAME_PREFIX}Derelict = new {NAME_PREFIX}DerelictLoader(); }} }} """.format(NAME_PREFIX = self.genOpts.namePrefix), file=self.funcsFile) self.typesFile.close() self.funcsFile.close()