def GetStdNamespace():
  """Build a synthetic 'std' namespace holding the built-in string types.

  Returns:
    A syntax_tree.Namespace named 'std' containing pod-bound NativeType
    definitions for 'string' and 'wstring', attributed to an internal
    source file whose header is 'common.h'.
  """
  attributes = {'binding_model': 'pod'}
  internal_file = idl_parser.File('<internal>')
  internal_file.header = "common.h"
  internal_file.npapi_cpp = None
  internal_file.npapi_header = None
  location = idl_parser.SourceLocation(internal_file, 0)
  # Both std string flavors bind as pod types.
  string_specs = [('string', 'string'), ('wstring', 'wstring')]
  types = [NativeType(location, attributes, name, pod_type)
           for (name, pod_type) in string_specs]
  return syntax_tree.Namespace(location, [], 'std', types)
def GetNativeTypes():
  """Build the list of built-in native type definitions.

  Returns:
    A list of pod-bound NativeType objects for the primitive types
    (void, int, unsigned int, size_t, bool, float, double, Variant),
    followed by the synthetic 'std' namespace from GetStdNamespace().
    All are attributed to an internal source file with no header.
  """
  attributes = {'binding_model': 'pod'}
  internal_file = idl_parser.File('<internal>')
  internal_file.header = None
  internal_file.npapi_cpp = None
  internal_file.npapi_header = None
  location = idl_parser.SourceLocation(internal_file, 0)
  # (IDL name, pod category) pairs, in declaration order.
  builtin_specs = [('void', 'void'),
                   ('int', 'int'),
                   ('unsigned int', 'int'),
                   ('size_t', 'int'),
                   ('bool', 'bool'),
                   ('float', 'float'),
                   ('double', 'float'),
                   ('Variant', 'variant')]
  definitions = [NativeType(location, attributes, name, pod_type)
                 for (name, pod_type) in builtin_specs]
  definitions.append(GetStdNamespace())
  return definitions
def main(argv): files = argv[1:] # generate a hash of all the inputs to figure out if we need to re-generate # the outputs. # Use hashlib if present (Python 2.5 and up), otherwise fall back to md5. if globals().has_key('hashlib'): md5_hash = hashlib.md5() else: md5_hash = md5.new() # hash the input files and the source python files (globbing *.py in the # directory of this file) for source_file in files + glob.glob( os.path.join(os.path.dirname(__file__), '*.py')): md5_hash.update(open(source_file).read()) # hash the options since they may affect the output for s in (FLAGS['generator-module'].value + FLAGS['binding-module'].value + FLAGS.generate + [FLAGS['output-dir'].value]): md5_hash.update(s) # import generator and binding model modules, and hash them AddModulesFromFlags(generators, FLAGS['generator-module'].value, md5_hash) AddModulesFromFlags(binding_models, FLAGS['binding-module'].value, md5_hash) output_dir = FLAGS['output-dir'].value if not os.path.isdir(output_dir): os.makedirs(output_dir) hash_filename = os.path.join(output_dir, 'hash') hash_value = md5_hash.hexdigest() if not FLAGS.force: try: hash_file = open(hash_filename, 'r') # Don't read while others are writing... if FLAGS['exclusive-lock'].value: locking.lockf(hash_file, locking.LOCK_SH) old_hash = hash_file.read() if FLAGS['exclusive-lock'].value: locking.lockf(hash_file, locking.LOCK_UN) hash_file.close() if hash_value == old_hash: print "Source files haven't changed: nothing to generate." return except IOError: # Could not load the hash file, so there must be stuff to # generate. 
pass hash_file = open(hash_filename, 'w') if FLAGS['exclusive-lock'].value: locking.lockf(hash_file, locking.LOCK_EX) my_parser = idl_parser.Parser(output_dir) pairs = [] for f in files: idl_file = idl_parser.File(f) defn = my_parser.Parse(idl_file) pairs.append((idl_file, defn)) definitions = sum([defn for (f, defn) in pairs], []) + GetNativeTypes() global_namespace = syntax_tree.Namespace(None, [], '', definitions) syntax_tree.FinalizeObjects(global_namespace, binding_models) writer_list = [] for generator_name in FLAGS.generate: try: generator = generators[generator_name] writer_list += generator.ProcessFiles(output_dir, pairs, global_namespace) except KeyError: print 'Unknown generator %s.' % generator_name raise for writer in writer_list: writer.Write() # Save hash for next time hash_file.write(hash_value) if FLAGS['exclusive-lock'].value: locking.lockf(hash_file, locking.LOCK_UN) hash_file.close() log.FailIfHaveErrors()