def run(tree, args):
    """Entrypoint to the C++ backend.

    Records the input file's name/directory in config.state, processes the
    backend arguments, then drives header, skeleton and (optionally)
    dynamic-skeleton / example-code generation over the IDL tree.

    Parameters:
        tree -- the parsed IDL AST (omniidl front end).
        args -- backend command-line argument list.

    Raises SystemExit (via util.fatalError) if called more than once, and
    re-raises AttributeError after cleaning up partial output when an
    unsupported IDL construct is met.
    """
    global run_before

    # The backend keeps global state, so a second invocation would corrupt it.
    if run_before:
        util.fatalError("Sorry, the C++ backend cannot process more "
                        "than one IDL file at a time.")
    run_before = 1

    dirname, filename = os.path.split(tree.file())
    basename, ext = os.path.splitext(filename)
    config.state["Basename"]  = basename
    config.state["Directory"] = dirname

    process_args(args)

    try:
        # Check the input tree only contains stuff we understand
        support.checkIDL(tree)

        # initialise the handy ast module
        ast.__init__(tree)

        # Initialise the descriptor generating code
        descriptor.__init__(tree)

        # Build the map of AST nodes to Environments
        tree.accept(id.WalkTree())

        # AMI code hooks into existing infrastructure (ie doesn't need to
        # be driven explicitly here)
        # if config.state['AMI']:
        #     tree = ami.__init__(tree)
        #     tree.accept(id.WalkTree())
        # Not ported yet.

        header.run(tree)
        skel.run(tree)

        # if we're generating code for Typecodes and Any then
        # we need to create the DynSK.cc file
        if config.state["Typecode"]:
            dynskel.run(tree)

        if config.state["Example Code"]:
            impl.run(tree)

    except AttributeError as e:
        # Fix: read e.args[0] rather than e[0] -- indexing an exception is
        # Python 2-only behaviour (matches the other backend variants).
        name = e.args[0]
        unsupported_visitors = map(lambda x: "visit" + x,
                                   AST_unsupported_nodes[:])
        if name in unsupported_visitors:
            # delete all possibly partial output files
            for file in output.listAllCreatedFiles():
                os.unlink(file)
            util.unsupportedIDL()
        raise

    except SystemExit:
        # util.fatalError raises SystemExit: remove any partially written
        # output files before propagating, as the sibling backends do.
        for file in output.listAllCreatedFiles():
            os.unlink(file)
        raise
def run(tree, args):
    """Entrypoint to the C++ backend"""
    global run_before

    # Refuse a second invocation: the backend keeps global state.
    if run_before:
        util.fatalError("Sorry, the C++ backend cannot process more "
                        "than one IDL file at a time.")
    run_before = 1

    # Initialise modules that would otherwise contain circular imports.
    call.init()
    iface.init()

    idl_dir, idl_file = os.path.split(tree.file())
    stem = os.path.splitext(idl_file)[0]
    config.state['Basename']  = stem
    config.state['Directory'] = idl_dir

    process_args(args)

    try:
        # Reject IDL constructs this backend does not understand.
        support.checkIDL(tree)

        # Graft the AMI implied IDL onto the tree when requested.
        if config.state['AMI']:
            from omniidl_be import ami
            tree.accept(ami.AMIVisitor())

        # Prime the helper modules with the current tree.
        ast.__init__(tree)
        descriptor.__init__(tree)

        # Associate each AST node with its Environment.
        tree.accept(id.WalkTree())

        header.run(tree)
        skel.run(tree)

        # Typecode/Any support is emitted into a separate DynSK.cc file.
        if config.state['Typecode']:
            dynskel.run(tree)

        if config.state['Example Code']:
            impl.run(tree)

    except SystemExit:
        # fatalError raises SystemExit: scrub any half-written output
        # files before letting it propagate.
        for created in output.listAllCreatedFiles():
            os.unlink(created)
        raise
def run(tree, backend_args):
    """Drive generation of the .hpp/.hxx/.cc output trio for one IDL file."""
    # Record where the input came from for use by the generators.
    idl_dir, idl_file = os.path.split(tree.file())
    stem = os.path.splitext(idl_file)[0]
    config.state['Basename']  = stem
    config.state['Directory'] = idl_dir

    process_args(backend_args)

    try:
        # Reject IDL constructs we do not understand.
        support.checkIDL(tree)

        # Prime the helper modules with the current tree.
        ast.__init__(tree)
        descriptor.__init__(tree)

        # Associate each AST node with its Environment.
        tree.accept(id.WalkTree())

        base = config.state['Basename']
        hh_filename  = base + config.state['HH Suffix']
        hpp_filename = base + config.state['HPP Suffix']
        hxx_filename = base + config.state['HXX Suffix']
        cc_filename  = base + config.state['CC Suffix']

        prefix = ""
        if config.state["Include Prefix"]:
            prefix = config.state['Include Prefix'] + '/'

        idl_filename = tree.file()

        hpp_stream = output.Stream(output.createFile(hpp_filename), 2)
        hxx_stream = output.Stream(output.createFile(hxx_filename), 2)
        cc_stream  = output.Stream(output.createFile(cc_filename), 2)

        # The main module is (re)initialised in place and used as a
        # singleton generator object.
        main.self = main
        main.__init__(hpp_stream, hxx_stream, cc_stream, idl_filename,
                      prefix, hh_filename, hpp_filename, hxx_filename)
        main.run(tree)

        for stream in (hpp_stream, hxx_stream, cc_stream):
            stream.close()

    except AttributeError as e:
        name = e.args[0]
        unsupported_visitors = map(lambda x: "visit" + x,
                                   AST_unsupported_nodes[:])
        if name in unsupported_visitors:
            # Unsupported IDL: remove all possibly partial output files.
            for created in output.listAllCreatedFiles():
                os.unlink(created)
            util.unsupportedIDL()
        raise

    except SystemExit:
        # fatalError raises SystemExit: remove partial output, then
        # propagate.
        for created in output.listAllCreatedFiles():
            os.unlink(created)
        raise
def run(tree, args):
    """Entrypoint to the doil backend.

    Loads an optional YAML type-mapping file into config.state, then runs
    the dictionary builder and whichever generators (interface, CORBA
    servant/adapter/proxy, Ice slice/adapter/proxy/servant) are enabled in
    config.state.

    Parameters:
        tree -- the parsed IDL AST (omniidl front end).
        args -- backend command-line argument list.

    Calls util.fatalError (which exits) on a second invocation or on an
    unexpected AttributeError.
    """
    global run_before

    # The backend keeps global state, so a second invocation would corrupt it.
    if run_before:
        util.fatalError("Sorry, the doil backend cannot process more "
                        "than one IDL file at a time.")
    run_before = 1

    dirname, filename = os.path.split(tree.file())
    basename, ext = os.path.splitext(filename)
    config.state['Basename']  = basename
    config.state['Directory'] = dirname

    process_args(args)

    if config.state['MappingFile'] == '':
        config.state['TypeMapping'] = {}
    else:
        import yaml
        mapping_file = config.state['MappingFile']
        # Fix: context manager guarantees the file is closed even if
        # parsing raises.
        with open(mapping_file, "r") as f:
            # SECURITY NOTE(review): yaml.load without an explicit Loader
            # can execute arbitrary Python from the mapping file.  Use
            # yaml.safe_load if the mapping never needs custom tags.
            mapping = yaml.load(f.read())
        config.state['TypeMapping'] = mapping

    try:
        # Check the input tree only contains stuff we understand
        support.checkIDL(tree)

        # initialise the handy ast module
        ast.__init__(tree)
        tree.accept(id.WalkTree())

        # Initialise the descriptor generating code
        # currently doil does not use descriptor
        # descriptor.__init__(tree)

        from omniidl_be.doil import dictbuilder
        # Renamed from "dict" so the builtin is not shadowed.
        template_dict = dictbuilder.run(tree, config.state)

        if config.state['Interface']:
            from omniidl_be.doil import interface
            interface.generate_interface(template_dict)
            interface.generate_types(template_dict)
        if config.state['CORBAServant']:
            from omniidl_be.doil import corba
            corba.generate_servant(template_dict)
            corba.generate_types(template_dict)
        if config.state['CORBAAdapter']:
            from omniidl_be.doil import corba
            corba.generate_adapter(template_dict)
            corba.generate_types(template_dict)
        if config.state['CORBAProxy']:
            from omniidl_be.doil import corba
            corba.generate_proxy(template_dict)
            corba.generate_types(template_dict)
        if config.state['IceSlice']:
            from omniidl_be.doil import ice
            ice.generate_slice(template_dict)
        if config.state['IceAdapter']:
            from omniidl_be.doil import ice
            ice.generate_adapter(template_dict)
        if config.state['IceProxy']:
            from omniidl_be.doil import ice
            ice.generate_proxy(template_dict)
        if config.state['IceServant']:
            from omniidl_be.doil import ice
            ice.generate_servant(template_dict)

    except AttributeError as e:
        # Fix: read e.args[0] rather than e[0] -- indexing an exception is
        # Python 2-only behaviour.
        name = e.args[0]
        unsupported_visitors = map(lambda x: "visit" + x,
                                   AST_unsupported_nodes[:])
        if name in unsupported_visitors:
            util.unsupportedIDL()
        util.fatalError("An AttributeError exception was caught")
def run(tree, args):
    """Entrypoint to the C++ backend.

    Records the input file's name/directory in config.state, processes the
    backend arguments, then drives header, skeleton and (optionally)
    dynamic-skeleton / example-code generation over the IDL tree.

    Parameters:
        tree -- the parsed IDL AST (omniidl front end).
        args -- backend command-line argument list.

    Raises SystemExit (via util.fatalError) if called more than once, and
    re-raises AttributeError after cleaning up partial output when an
    unsupported IDL construct is met.
    """
    global run_before

    # The backend keeps global state, so a second invocation would corrupt it.
    if run_before:
        util.fatalError("Sorry, the C++ backend cannot process more "
                        "than one IDL file at a time.")
    run_before = 1

    dirname, filename = os.path.split(tree.file())
    basename, ext = os.path.splitext(filename)
    config.state['Basename']  = basename
    config.state['Directory'] = dirname

    process_args(args)

    try:
        # Check the input tree only contains stuff we understand
        support.checkIDL(tree)

        # initialise the handy ast module
        ast.__init__(tree)

        # Initialise the descriptor generating code
        descriptor.__init__(tree)

        # Build the map of AST nodes to Environments
        tree.accept(id.WalkTree())

        # AMI code hooks into existing infrastructure (ie doesn't need to
        # be driven explicitly here)
        # if config.state['AMI']:
        #     tree = ami.__init__(tree)
        #     tree.accept(id.WalkTree())
        # Not ported yet.

        header.run(tree)
        skel.run(tree)

        # if we're generating code for Typecodes and Any then
        # we need to create the DynSK.cc file
        if config.state['Typecode']:
            dynskel.run(tree)

        if config.state['Example Code']:
            impl.run(tree)

    except AttributeError as e:
        # "except X as e" replaces the Python 2-only "except X, e" form.
        name = e.args[0]
        unsupported_visitors = map(lambda x: "visit" + x,
                                   AST_unsupported_nodes[:])
        if name in unsupported_visitors:
            # delete all possibly partial output files
            for file in output.listAllCreatedFiles():
                os.unlink(file)
            util.unsupportedIDL()
        raise

    except SystemExit:
        # Fix: util.fatalError raises SystemExit; remove any partially
        # written output files before propagating, as the sibling
        # backend variants do.
        for file in output.listAllCreatedFiles():
            os.unlink(file)
        raise
def run(tree, backend_args):
    """Drive generation of the .hpp/.hxx/.cc output trio for one IDL file."""
    # Record where the input came from for use by the generators.
    idl_dir, idl_file = os.path.split(tree.file())
    stem = os.path.splitext(idl_file)[0]
    config.state["Basename"]  = stem
    config.state["Directory"] = idl_dir

    process_args(backend_args)

    try:
        # Reject IDL constructs we do not understand.
        support.checkIDL(tree)

        # Prime the helper modules with the current tree.
        ast.__init__(tree)
        descriptor.__init__(tree)

        # Associate each AST node with its Environment.
        tree.accept(id.WalkTree())

        base = config.state["Basename"]
        hh_filename  = base + config.state["HH Suffix"]
        hpp_filename = base + config.state["HPP Suffix"]
        hxx_filename = base + config.state["HXX Suffix"]
        cc_filename  = base + config.state["CC Suffix"]

        prefix = ""
        if config.state["Include Prefix"]:
            prefix = config.state["Include Prefix"] + "/"

        idl_filename = tree.file()

        hpp_stream = output.Stream(output.createFile(hpp_filename), 2)
        hxx_stream = output.Stream(output.createFile(hxx_filename), 2)
        cc_stream  = output.Stream(output.createFile(cc_filename), 2)

        # The main module is (re)initialised in place and used as a
        # singleton generator object.
        main.self = main
        main.__init__(hpp_stream, hxx_stream, cc_stream, idl_filename,
                      prefix, hh_filename, hpp_filename, hxx_filename)
        main.run(tree)

        for stream in (hpp_stream, hxx_stream, cc_stream):
            stream.close()

    except AttributeError as e:
        name = e.args[0]
        unsupported_visitors = map(lambda x: "visit" + x,
                                   AST_unsupported_nodes[:])
        if name in unsupported_visitors:
            # Unsupported IDL: remove all possibly partial output files.
            for created in output.listAllCreatedFiles():
                os.unlink(created)
            util.unsupportedIDL()
        raise

    except SystemExit:
        # fatalError raises SystemExit: remove partial output, then
        # propagate.
        for created in output.listAllCreatedFiles():
            os.unlink(created)
        raise