def build_metamodel(self, id_generator=None):
    '''
    Build and return a *xtuml.MetaModel* populated with all previously
    loaded input.

    id_generator -- optional identifier generator forwarded to the
                    MetaModel constructor (default: MetaModel's own)
    '''
    metamodel = xtuml.MetaModel(id_generator)
    self.populate(metamodel)
    return metamodel
def run_build(working_directory='.', gen_workspace='code_generation',
              output_directory='src', variant='c', model_inputs=None):
    '''
    Execute a complete MC-3020 build: set up the environment, install the
    model compiler archetypes and marking files into a scratch workspace,
    pre-build the BridgePoint model inputs, run the RSL code generator, and
    install the generated sources into *output_directory*.

    working_directory -- directory containing user .mark files and an
                         optional custom.txt listing hand-customized sources
    gen_workspace     -- scratch directory used for generation; it is
                         deleted and re-created on every run
    output_directory  -- destination directory for generated .c/.h files
    variant           -- archetype variant subdirectory to overlay (e.g. 'c')
    model_inputs      -- model files/paths passed to
                         bridgepoint.load_metamodel (default: none)

    Note: this function changes the process working directory (os.chdir)
    into the generation workspace as a side effect.
    '''
    # fix: was a mutable default argument (model_inputs=[]), which is
    # shared across calls
    if model_inputs is None:
        model_inputs = []

    # setup build
    print('MC-3020: Setting up build environment...')
    working_directory = os.path.abspath(working_directory)  # resolve working directory path
    gen_workspace = os.path.abspath(gen_workspace)  # resolve gen workspace path
    arc_dir = os.path.join(gen_workspace, 'arc')
    os.environ['ROX_MC_ARC_DIR'] = arc_dir  # set archetype directory
    output_directory = os.path.abspath(output_directory)  # resolve output path
    if not os.path.exists(output_directory):
        os.makedirs(output_directory)

    # prepare gen workspace: always start from an empty directory
    print('MC-3020: Preparing generation workspace...')
    if os.path.exists(gen_workspace):
        shutil.rmtree(gen_workspace)
    os.makedirs(gen_workspace)

    # copy archetypes
    print('MC-3020: Installing model compiler archetypes...')
    if not os.path.exists(arc_dir):
        os.makedirs(arc_dir)
    for arcfile in filter(
            lambda path: not os.path.isdir(os.path.join(ARCDIR, path)),
            os.listdir(ARCDIR)):
        shutil.copyfile(os.path.join(ARCDIR, arcfile),
                        os.path.join(arc_dir, arcfile))
    # variant-specific archetypes overlay the generic ones
    variant_dir = os.path.join(ARCDIR, variant)
    if os.path.isdir(variant_dir):
        for arcfile in filter(
                lambda path: not os.path.isdir(
                    os.path.join(variant_dir, path)),
                os.listdir(variant_dir)):
            shutil.copyfile(os.path.join(variant_dir, arcfile),
                            os.path.join(arc_dir, arcfile))

    # copy marking files
    print('MC-3020: Installing user marks...')
    for markfile in filter(lambda path: os.path.splitext(path)[1] == '.mark',
                           os.listdir(os.path.join(SCHEMADIR, 'colors'))):
        shutil.copyfile(os.path.join(SCHEMADIR, 'colors', markfile),
                        os.path.join(gen_workspace, markfile))
    shutil.copyfile(os.path.join(SCHEMADIR, 'colors', 'sys_functions.arc'),
                    os.path.join(gen_workspace, 'sys_functions.arc'))
    # user marks from the working directory override the defaults
    for user_markfile in filter(
            lambda path: os.path.splitext(path)[1] == '.mark',
            os.listdir(working_directory)):
        shutil.copyfile(os.path.join(working_directory, user_markfile),
                        os.path.join(gen_workspace, user_markfile))

    # execute pre-build
    print('MC-3020: Pre-building...')
    model = bridgepoint.load_metamodel(model_inputs)
    bridgepoint.prebuild_model(model)
    xtuml.persist_instances(model, os.path.join(gen_workspace, 'a.sql'))

    # execute code generation
    print('MC-3020: Generating code...')
    os.chdir(gen_workspace)
    id_generator = xtuml.IntegerGenerator()
    model = xtuml.MetaModel(id_generator)
    loader = xtuml.ModelLoader()
    loader.filename_input(os.path.join(SCHEMADIR, 'sql',
                                       'xtumlmc_schema.sql'))
    loader.filename_input(os.path.join(gen_workspace, 'a.sql'))
    loader.populate(model)
    rt = rsl.Runtime(model, 'change', True, None)
    ast = rsl.parse_file(os.path.join(arc_dir, 'sys.arc'))
    rsl.evaluate(rt, ast, ['.'])

    # copy generated sources to output directory
    print('MC-3020: Installing generated sources...')
    custom_implementations = []
    try:
        with open(os.path.join(working_directory, 'custom.txt')) as f:
            custom_implementations = [s.strip() for s in f.readlines()]
    except OSError:
        # fix: was a bare except; only a missing/unreadable custom.txt is
        # the expected best-effort case here
        print('MC-3020: No custom sources found...')
    for src_file in filter(
            lambda path: os.path.splitext(path)[1] in ['.h', '.c'],
            os.listdir(os.path.join(gen_workspace, '_ch'))):
        if src_file in custom_implementations:
            # don't clobber hand-customized sources; install as .orig
            shutil.copyfile(os.path.join(gen_workspace, '_ch', src_file),
                            os.path.join(output_directory,
                                         src_file + '.orig'))
        else:
            shutil.copyfile(os.path.join(gen_workspace, '_ch', src_file),
                            os.path.join(output_directory, src_file))
    print('MC-3020: Done.')
def setUp(self):
    """Create a fresh metamodel, RSL runtime, and include path per test."""
    generator = xtuml.IntegerGenerator()
    self.metamodel = xtuml.MetaModel(generator)
    self.runtime = rsl.runtime.Runtime(self.metamodel)
    self.includes = ['./']
def setUp(self):
    """Create an empty metamodel (default id generator) for each test."""
    self.metamodel = xtuml.MetaModel()
def main(argv=None):
    '''
    Command line entry point. Scans gen_erate-style arguments, loads SQL
    and archetype inputs in the order given, evaluates archetypes against
    the accumulated metamodel, and optionally persists or dumps the
    resulting database.

    argv -- argument vector to parse (default: sys.argv)

    Returns the number of integrity errors found (always 0 unless
    -integrity was given), suitable for use as a process exit status.
    '''
    loglevel = logging.INFO
    database_filename = 'mcdbms.gen'
    enable_persistance = True
    dump_sql_file = ''
    force_overwrite = False
    emit_when = 'change'
    diff_filename = None
    inputs = list()
    includes = ['.']
    check_integrity = False
    argv = argv or sys.argv
    quiet_insert_mismatch = False

    # Hand-rolled argument scan, kept for compatibility with the legacy
    # gen_erate command line (which argparse/getopt cannot express).
    i = 1
    while i < len(argv):
        if argv[i] == '-arch':
            i += 1
            inputs.append((argv[i], 'arc'))
        elif argv[i] == '-import':
            i += 1
            inputs.append((argv[i], 'sql'))
        elif argv[i] == '-include':
            i += 1
            includes.append(argv[i])
        elif argv[i] == '-emit':
            i += 1
            emit_when = argv[i]
        elif argv[i] == '-f':
            i += 1
            database_filename = argv[i]
        elif argv[i] == '-force':
            force_overwrite = True
        elif argv[i] == '-integrity':
            check_integrity = True
        elif argv[i] == '-diff':
            i += 1
            diff_filename = argv[i]
        elif argv[i] == '-nopersist':
            enable_persistance = False
        elif argv[i] == '-dumpsql':
            i += 1
            dump_sql_file = argv[i]
        elif argv[i] == '-v':
            # NOTE(review): -v consumes the following argument (i += 1) and
            # unconditionally selects DEBUG logging — presumably legacy
            # gen_erate compatibility; confirm before changing
            i += 1
            loglevel = logging.DEBUG
        elif argv[i] == '-qim':
            quiet_insert_mismatch = True
        elif argv[i] == '-version':
            print(rsl.version.complete_string)
            sys.exit(0)
        elif argv[i] == '-h':
            print(complete_usage % argv[0])
            sys.exit(0)
        elif argv[i] in ['//', '-ignore_rest']:
            break  # stop parsing; remaining arguments are ignored
        elif argv[i] in [
            '-lVHs', '-lSCs', '-l2b', '-l2s', '-l3b', '-l3s', '-q', '-l'
        ]:
            pass  # ignore these options
        elif argv[i] in ['-d', '-priority', '-e', '-t', '-#']:
            i += 1  # ignore these options (which expect a following value)
        else:
            print("PARSE ERROR: Argument: %s" % argv[i])
            print("Couldn't find match for argument")
            print(brief_usage % (argv[0], argv[0]))
            sys.exit(1)
        i += 1

    logging.basicConfig(stream=sys.stdout, level=loglevel)

    id_generator = xtuml.IntegerGenerator()
    metamodel = xtuml.MetaModel(id_generator)
    loader = xtuml.ModelLoader()

    if quiet_insert_mismatch:
        # suppress model-loader warnings below ERROR severity
        load_logger = logging.getLogger(xtuml.load.__name__)
        load_logger.setLevel(logging.ERROR)

    if diff_filename:
        # record the exact invocation at the top of the diff file
        with open(diff_filename, 'w') as f:
            f.write(' '.join(argv))
            f.write('\n')

    # resume from a previously persisted database, if one exists
    if enable_persistance and os.path.isfile(database_filename):
        loader.filename_input(database_filename)

    for filename, kind in inputs:
        if kind == 'sql':
            loader.filename_input(filename)
        elif kind == 'arc':
            # flush everything queued so far into the metamodel, run the
            # archetype, then start a fresh loader for later sql inputs
            loader.populate(metamodel)
            rt = rsl.Runtime(metamodel, emit_when, force_overwrite,
                             diff_filename)
            ast = rsl.parse_file(filename)
            rsl.evaluate(rt, ast, includes)
            loader = xtuml.ModelLoader()
        else:
            #should not happen
            print("Unknown %s is of unknown kind '%s', skipping it" %
                  (filename, kind))

    errors = 0
    if check_integrity:
        errors += xtuml.check_association_integrity(metamodel)
        errors += xtuml.check_uniqueness_constraint(metamodel)

    if enable_persistance:
        xtuml.persist_database(metamodel, database_filename)

    if dump_sql_file != '':
        xtuml.persist_instances(metamodel, dump_sql_file)

    return errors