def test_serialize_default_values(self):
    """Instances created without explicit attribute values must serialize
    with their defaults and survive a serialize/load round-trip.

    Uses an IntegerGenerator so the auto-assigned UNIQUE_ID is predictable.
    """
    schema = ''' CREATE TABLE X (BOOLEAN BOOLEAN, INTEGER INTEGER, REAL REAL, STRING STRING, UNIQUE_ID UNIQUE_ID); '''
    loader = xtuml.ModelLoader()
    loader.input(schema)
    id_generator = xtuml.IntegerGenerator()
    m = loader.build_metamodel(id_generator)
    m.new('X')
    s = xtuml.serialize_instances(m)

    # reload the serialized instances into a fresh metamodel
    loader = xtuml.ModelLoader()
    loader.input(schema)
    loader.input(s)
    id_generator = xtuml.IntegerGenerator()
    m = loader.build_metamodel(id_generator)
    x = m.select_any('X')
    self.assertEqual(x.BOOLEAN, False)
    self.assertEqual(x.INTEGER, 0)
    self.assertEqual(x.REAL, 0.0)
    # Previously missing: the default STRING value must round-trip as well.
    self.assertEqual(x.STRING, '')
    self.assertEqual(x.UNIQUE_ID, 1)
def test_serialize_schema(self):
    """A schema serialized from a populated model must load into a fresh,
    empty metamodel with its classes and associations intact."""
    schema = ''' CREATE TABLE X (BOOLEAN BOOLEAN, INTEGER INTEGER, REAL REAL, STRING STRING, UNIQUE_ID UNIQUE_ID, Next UNIQUE_ID); CREATE ROP REF_ID R1 FROM 1C X ( Next ) PHRASE 'precedes' TO 1C X ( UNIQUE_ID ) PHRASE 'succeeds'; '''
    source_loader = xtuml.ModelLoader()
    source_loader.input(schema)
    source_model = source_loader.build_metamodel()
    first = source_model.new('X', Boolean=True, Integer=4, String='str')
    second = source_model.new('X', Boolean=True, Integer=4, String='str')
    xtuml.relate(first, second, 1, 'precedes')
    schema_text = xtuml.serialize_schema(source_model)

    target_loader = xtuml.ModelLoader()
    target_loader.input(schema_text)
    target_model = target_loader.build_metamodel()
    # only the schema was serialized, so no instances come across
    self.assertFalse(target_model.select_any('X'))
    first = target_model.new('X', Boolean=True, Integer=4, String='str')
    second = target_model.new('X', Boolean=True, Integer=4, String='str')
    xtuml.relate(first, second, 1, 'succeeds')
    # the reloaded association R1 must still be navigable
    self.assertTrue(xtuml.navigate_one(first).X[1, 'succeeds']())
def test_serialize(self):
    """Explicitly-set attribute values keep both their value and their
    Python type across a serialize/load round-trip."""
    schema = ''' CREATE TABLE X (BOOLEAN BOOLEAN, INTEGER INTEGER, REAL REAL, STRING STRING, UNIQUE_ID UNIQUE_ID); '''
    ldr = xtuml.ModelLoader()
    ldr.input(schema)
    model = ldr.build_metamodel()
    model.new('X', BOOLEAN=True, INTEGER=1, REAL=-5.5, UNIQUE_ID=1)
    text = xtuml.serialize_instances(model)

    ldr = xtuml.ModelLoader()
    ldr.input(schema)
    ldr.input(text)
    model = ldr.build_metamodel()
    inst = model.select_any('X')
    self.assertEqual(inst.BOOLEAN, True)
    self.assertEqual(inst.INTEGER, 1)
    self.assertEqual(inst.REAL, -5.5)
    self.assertEqual(inst.UNIQUE_ID, 1)
    # the loader must restore native types, not just string values
    self.assertIsInstance(inst.BOOLEAN, bool)
    self.assertIsInstance(inst.INTEGER, int)
    self.assertIsInstance(inst.REAL, float)
def compare_wrapper(self, *args, **kwargs):
    """Build a metamodel from fn's docstring schema, serialize and reload it,
    assert both models describe identical classes, then run fn.

    NOTE: `fn` and `compare_metamodel_classes` are free variables from the
    enclosing scope (decorator pattern).
    """
    original_loader = xtuml.ModelLoader()
    original_loader.input(fn.__doc__)
    original = original_loader.build_metamodel()
    schema_text = xtuml.serialize_schema(original)

    clone_loader = xtuml.ModelLoader()
    clone_loader.input(schema_text)
    clone = clone_loader.build_metamodel()
    self.assertTrue(compare_metamodel_classes(original, clone))
    fn(self)
def test_persist_database(self):
    """xtuml.persist_database must write exactly what xtuml.serialize returns."""
    schema = ''' CREATE TABLE X (BOOLEAN BOOLEAN, INTEGER INTEGER, REAL REAL, STRING STRING, UNIQUE_ID UNIQUE_ID, Next UNIQUE_ID); CREATE ROP REF_ID R1 FROM 1C X ( Next ) PHRASE 'precedes' TO 1C X ( UNIQUE_ID ) PHRASE 'succeeds'; '''
    loader = xtuml.ModelLoader()
    loader.input(schema)
    m = loader.build_metamodel()
    # Bug fix: was 'Uniquie_Id' (misspelled), which does not match the
    # UNIQUE_ID attribute, so the intended value 5 was never applied.
    m.new('X', Boolean=True, Integer=4, String='str', Unique_Id=5)
    s = xtuml.serialize(m)
    (_, filename) = tempfile.mkstemp()
    try:
        xtuml.persist_database(m, filename)
        with open(filename) as f:
            self.assertEqual(s, f.read())
    finally:
        # defer removal until interpreter exit (same pattern as the other
        # persistence tests); presumably avoids platform file-lock issues
        atexit.register(os.remove, filename)
def load_wrapper(self, *args, **kwargs):
    """Invoke fn with either the metamodel built from fn's docstring, or the
    exception raised while building it.

    The broad Exception catch is deliberate: the outcome (model or error)
    is handed to fn for inspection. `fn` is a free variable from the
    enclosing scope (decorator pattern).
    """
    try:
        ldr = xtuml.ModelLoader()
        ldr.input(fn.__doc__)
        outcome = ldr.build_metamodel()
    except Exception as ex:
        outcome = ex
    fn(self, outcome)
def run(self):
    """Smoke-test that xtuml and the OAL parser can be imported and handle
    empty input, then delegate to the regular build_py step."""
    import xtuml
    from bridgepoint import oal

    loader = xtuml.ModelLoader()
    loader.input('', name='<empty string>')
    loader.build_metamodel()
    oal.parse('')
    build_py.run(self)
def test_serialize_undefined_table(self):
    """Instances loaded without any schema must serialize identically to the
    same instances loaded together with their schema."""
    schema = ''' CREATE TABLE X ( _0 UNIQUE_ID, _1 STRING, _2 STRING, _3 INTEGER, _4 INTEGER, _5 BOOLEAN, _6 BOOLEAN, _7 INTEGER, _8 REAL, _9 REAL ); '''
    instances = ''' INSERT INTO X VALUES ( "00000000-0000-0000-0000-000000000000", 'TE''ST', 'test', 1, 0, false, true, -5, 1.543, -0.543 ); '''
    # load the instances with no schema at all
    ldr = xtuml.ModelLoader()
    ldr.input(instances)
    model = ldr.build_metamodel()
    without_schema = xtuml.serialize_database(model)

    # load the same instances together with their schema
    ldr = xtuml.ModelLoader()
    ldr.input(schema)
    ldr.input(instances)
    model = ldr.build_metamodel()
    with_schema = xtuml.serialize_database(model)

    self.assertEqual(without_schema, with_schema)
def test_serialize_attribute_named_self(self):
    """An attribute literally named 'self' must survive a round-trip."""
    schema = ''' CREATE TABLE X (self UNIQUE_ID); '''
    ldr = xtuml.ModelLoader()
    ldr.input(schema)
    model = ldr.build_metamodel()
    model.new('X', 1)
    text = xtuml.serialize_instances(model)

    ldr = xtuml.ModelLoader()
    ldr.input(schema)
    ldr.input(text)
    model = ldr.build_metamodel()
    inst = model.select_any('X')
    self.assertEqual(inst.self, 1)
def test_serialize_unique_identifiers(self):
    """CREATE UNIQUE INDEX statements must be serializable and still enforced
    after reloading the serialized schema."""
    schema = ''' CREATE TABLE X (s1 STRING, s2 STRING); CREATE UNIQUE INDEX I1 ON X (s1, s2); '''
    ldr = xtuml.ModelLoader()
    ldr.input(schema)
    model = ldr.build_metamodel()
    text = xtuml.serialize_schema(model)
    text += xtuml.serialize_unique_identifiers(model)

    ldr = xtuml.ModelLoader()
    ldr.input(text)
    model = ldr.build_metamodel()
    first = model.new('X', s1='s1', s2='s2')
    second = model.new('X', s1='s1', s2='s2')
    # duplicate (s1, s2) pair violates the reloaded unique index I1
    self.assertFalse(model.is_consistent())
    second.s2 = 'S2'
    self.assertTrue(model.is_consistent())
def test_persist_default_values(self):
    """persist_instances must write exactly what serialize_instances returns,
    and default attribute values must survive a round-trip."""
    schema = ''' CREATE TABLE X (BOOLEAN BOOLEAN, INTEGER INTEGER, REAL REAL, STRING STRING, UNIQUE_ID UNIQUE_ID); '''
    loader = xtuml.ModelLoader()
    loader.input(schema)
    id_generator = xtuml.IntegerGenerator()
    m = loader.build_metamodel(id_generator)
    m.new('X')
    s = xtuml.serialize_instances(m)
    (_, filename) = tempfile.mkstemp()
    try:
        xtuml.persist_instances(m, filename)
        with open(filename) as f:
            # the persisted file must match the in-memory serialization
            self.assertEqual(s, f.read())
    finally:
        # defer removal until interpreter exit (same pattern as the other
        # persistence tests)
        atexit.register(os.remove, filename)

    # reload and verify every default value
    loader = xtuml.ModelLoader()
    loader.input(schema)
    loader.input(s)
    id_generator = xtuml.IntegerGenerator()
    m = loader.build_metamodel(id_generator)
    x = m.select_any('X')
    self.assertEqual(x.BOOLEAN, False)
    self.assertEqual(x.INTEGER, 0)
    self.assertEqual(x.REAL, 0.0)
    # Previously missing: the default STRING value must round-trip as well.
    self.assertEqual(x.STRING, '')
    self.assertEqual(x.UNIQUE_ID, 1)
def test_implicit_serialize(self):
    """xtuml.serialize must dispatch to the matching specialized serializer
    for classes, associations, instances and whole metamodels."""
    schema = ''' CREATE TABLE X (BOOLEAN BOOLEAN, INTEGER INTEGER, REAL REAL, STRING STRING, UNIQUE_ID UNIQUE_ID, Next UNIQUE_ID); CREATE ROP REF_ID R1 FROM 1C X ( Next ) PHRASE 'precedes' TO 1C X ( UNIQUE_ID ) PHRASE 'succeeds'; '''
    ldr = xtuml.ModelLoader()
    ldr.input(schema)
    model = ldr.build_metamodel()

    # class dispatch
    klass = model.find_class('X')
    generic = xtuml.serialize(klass)
    specific = xtuml.serialize_class(klass)
    self.assertTrue(generic)
    self.assertEqual(generic, specific)

    # association dispatch
    assoc = model.associations[0]
    generic = xtuml.serialize(assoc)
    specific = xtuml.serialize_association(assoc)
    self.assertTrue(generic)
    self.assertEqual(generic, specific)

    # instance dispatch
    inst = model.new('X', Boolean=True, Integer=4, String='str')
    generic = xtuml.serialize(inst)
    specific = xtuml.serialize_instance(inst)
    self.assertTrue(generic)
    self.assertEqual(generic, specific)

    # whole-metamodel dispatch
    generic = xtuml.serialize(model)
    specific = xtuml.serialize_database(model)
    self.assertTrue(generic)
    self.assertEqual(generic, specific)
def setUp(self):
    """Build the metamodel under test from the schema kept in the test
    class docstring."""
    loader = xtuml.ModelLoader()
    loader.input(self.__class__.__doc__)
    self.m = loader.build_metamodel()
def run_build(working_directory='.', gen_workspace='code_generation', output_directory='src', variant='c', model_inputs=None):
    """Drive a complete MC-3020 build: set up a generation workspace, install
    archetypes and marking files, pre-build the model, run the RSL archetype
    interpreter, and install the generated C sources.

    :param working_directory: directory holding user marks and custom.txt
    :param gen_workspace: scratch workspace (recreated from scratch each run)
    :param output_directory: where generated .h/.c files are installed
    :param variant: archetype variant subdirectory under ARCDIR
    :param model_inputs: model files passed to bridgepoint.load_metamodel

    Side effects: recreates gen_workspace, changes the process CWD to it,
    and sets the ROX_MC_ARC_DIR environment variable.
    """
    # Bug fix: the default was a mutable list ([]), shared across calls.
    if model_inputs is None:
        model_inputs = []

    # setup build
    print('MC-3020: Setting up build environment...')
    working_directory = os.path.abspath(working_directory)  # resolve working directory path
    gen_workspace = os.path.abspath(gen_workspace)  # resolve gen workspace path
    os.environ['ROX_MC_ARC_DIR'] = os.path.join(gen_workspace, 'arc')  # set archetype directory
    output_directory = os.path.abspath(output_directory)  # resolve output path
    if not os.path.exists(output_directory):
        os.makedirs(output_directory)

    # prepare gen workspace (always start from a clean slate)
    print('MC-3020: Preparing generation workspace...')
    if os.path.exists(gen_workspace):
        shutil.rmtree(gen_workspace)
    if not os.path.exists(gen_workspace):
        os.makedirs(gen_workspace)

    # copy archetypes: common ones first, then variant-specific overrides
    print('MC-3020: Installing model compiler archetypes...')
    if not os.path.exists(os.path.join(gen_workspace, 'arc')):
        os.makedirs(os.path.join(gen_workspace, 'arc'))
    for arcfile in filter(lambda path: not os.path.isdir(os.path.join(ARCDIR, path)),
                          os.listdir(ARCDIR)):
        shutil.copyfile(os.path.join(ARCDIR, arcfile),
                        os.path.join(gen_workspace, 'arc', arcfile))
    if os.path.exists(os.path.join(ARCDIR, variant)) and os.path.isdir(os.path.join(ARCDIR, variant)):
        for arcfile in filter(lambda path: not os.path.isdir(os.path.join(ARCDIR, variant, path)),
                              os.listdir(os.path.join(ARCDIR, variant))):
            shutil.copyfile(os.path.join(ARCDIR, variant, arcfile),
                            os.path.join(gen_workspace, 'arc', arcfile))

    # copy marking files: schema-provided marks, then user marks on top
    print('MC-3020: Installing user marks...')
    for markfile in filter(lambda path: os.path.splitext(path)[1] == '.mark',
                           os.listdir(os.path.join(SCHEMADIR, 'colors'))):
        shutil.copyfile(os.path.join(SCHEMADIR, 'colors', markfile),
                        os.path.join(gen_workspace, markfile))
    shutil.copyfile(os.path.join(SCHEMADIR, 'colors', 'sys_functions.arc'),
                    os.path.join(gen_workspace, 'sys_functions.arc'))
    for user_markfile in filter(lambda path: os.path.splitext(path)[1] == '.mark',
                                os.listdir(working_directory)):
        shutil.copyfile(os.path.join(working_directory, user_markfile),
                        os.path.join(gen_workspace, user_markfile))

    # execute pre-build
    print('MC-3020: Pre-building...')
    model = bridgepoint.load_metamodel(model_inputs)
    bridgepoint.prebuild_model(model)
    xtuml.persist_instances(model, os.path.join(gen_workspace, 'a.sql'))

    # execute code generation
    print('MC-3020: Generating code...')
    os.chdir(gen_workspace)
    id_generator = xtuml.IntegerGenerator()
    model = xtuml.MetaModel(id_generator)
    loader = xtuml.ModelLoader()
    loader.filename_input(os.path.join(SCHEMADIR, 'sql', 'xtumlmc_schema.sql'))
    loader.filename_input(os.path.join(gen_workspace, 'a.sql'))
    loader.populate(model)
    rt = rsl.Runtime(model, 'change', True, None)
    ast = rsl.parse_file(os.path.join(gen_workspace, 'arc', 'sys.arc'))
    rsl.evaluate(rt, ast, ['.'])

    # copy generated sources to output directory
    print('MC-3020: Installing generated sources...')
    custom_implementations = []
    try:
        with open(os.path.join(working_directory, 'custom.txt')) as f:
            custom_implementations = [line.strip() for line in f.readlines()]
    except OSError:
        # Bug fix: was a bare 'except:' that swallowed every exception;
        # only a missing/unreadable custom.txt is expected here.
        print('MC-3020: No custom sources found...')
    for src_file in filter(lambda path: os.path.splitext(path)[1] in ['.h', '.c'],
                           os.listdir(os.path.join(gen_workspace, '_ch'))):
        if src_file in custom_implementations:
            # a user-provided implementation exists; keep the generated
            # version beside it as a .orig reference
            shutil.copyfile(os.path.join(gen_workspace, '_ch', src_file),
                            os.path.join(output_directory, src_file + '.orig'))
        else:
            shutil.copyfile(os.path.join(gen_workspace, '_ch', src_file),
                            os.path.join(output_directory, src_file))
    print('MC-3020: Done.')
def main(argv=None):
    """Command-line entry point mimicking the legacy gen_erate tool.

    Parses argv by hand (to stay option-compatible with the legacy tool),
    loads SQL/archetype inputs, optionally checks model integrity, persists
    the database, and returns the number of integrity errors found.

    :param argv: argument vector; defaults to sys.argv when falsy
    :returns: number of integrity errors (0 when -integrity is not given)
    """
    # defaults for all options
    loglevel = logging.INFO
    database_filename = 'mcdbms.gen'
    enable_persistance = True
    dump_sql_file = ''
    force_overwrite = False
    emit_when = 'change'
    diff_filename = None
    inputs = list()  # (filename, kind) pairs, kind in {'arc', 'sql'}
    includes = ['.']
    check_integrity = False
    argv = argv or sys.argv
    quiet_insert_mismatch = False

    # hand-rolled argv scan; options that take a value advance i themselves
    i = 1
    while i < len(argv):
        if argv[i] == '-arch':
            i += 1
            inputs.append((argv[i], 'arc'))
        elif argv[i] == '-import':
            i += 1
            inputs.append((argv[i], 'sql'))
        elif argv[i] == '-include':
            i += 1
            includes.append(argv[i])
        elif argv[i] == '-emit':
            i += 1
            emit_when = argv[i]
        elif argv[i] == '-f':
            i += 1
            database_filename = argv[i]
        elif argv[i] == '-force':
            force_overwrite = True
        elif argv[i] == '-integrity':
            check_integrity = True
        elif argv[i] == '-diff':
            i += 1
            diff_filename = argv[i]
        elif argv[i] == '-nopersist':
            enable_persistance = False
        elif argv[i] == '-dumpsql':
            i += 1
            dump_sql_file = argv[i]
        elif argv[i] == '-v':
            # NOTE(review): '-v' consumes the following argument, presumably
            # for compatibility with the legacy tool's value-taking -v;
            # confirm this is intentional.
            i += 1
            loglevel = logging.DEBUG
        elif argv[i] == '-qim':
            quiet_insert_mismatch = True
        elif argv[i] == '-version':
            print(rsl.version.complete_string)
            sys.exit(0)
        elif argv[i] == '-h':
            print(complete_usage % argv[0])
            sys.exit(0)
        elif argv[i] in ['//', '-ignore_rest']:
            break  # ignore everything after these markers
        elif argv[i] in [
            '-lVHs', '-lSCs', '-l2b', '-l2s', '-l3b', '-l3s', '-q', '-l'
        ]:
            pass  # ignore these options
        elif argv[i] in ['-d', '-priority', '-e', '-t', '-#']:
            i += 1  # ignore these options (which expect a following value)
        else:
            print("PARSE ERROR: Argument: %s" % argv[i])
            print("Couldn't find match for argument")
            print(brief_usage % (argv[0], argv[0]))
            sys.exit(1)
        i += 1

    logging.basicConfig(stream=sys.stdout, level=loglevel)

    id_generator = xtuml.IntegerGenerator()
    metamodel = xtuml.MetaModel(id_generator)
    loader = xtuml.ModelLoader()

    if quiet_insert_mismatch:
        # silence the loader's insert-mismatch warnings
        load_logger = logging.getLogger(xtuml.load.__name__)
        load_logger.setLevel(logging.ERROR)

    if diff_filename:
        # record the invocation at the top of the diff file
        with open(diff_filename, 'w') as f:
            f.write(' '.join(argv))
            f.write('\n')

    # resume from a previously persisted database, if any
    if enable_persistance and os.path.isfile(database_filename):
        loader.filename_input(database_filename)

    # process inputs in order; each archetype run consumes the queued SQL
    for filename, kind in inputs:
        if kind == 'sql':
            loader.filename_input(filename)
        elif kind == 'arc':
            loader.populate(metamodel)
            rt = rsl.Runtime(metamodel, emit_when, force_overwrite, diff_filename)
            ast = rsl.parse_file(filename)
            rsl.evaluate(rt, ast, includes)
            # fresh loader so later SQL inputs start from a clean queue
            loader = xtuml.ModelLoader()
        else:
            # should not happen: kinds are assigned above
            print("Unknown %s is of unknown kind '%s', skipping it" % (filename, kind))

    errors = 0
    if check_integrity:
        errors += xtuml.check_association_integrity(metamodel)
        errors += xtuml.check_uniqueness_constraint(metamodel)

    if enable_persistance:
        xtuml.persist_database(metamodel, database_filename)

    if dump_sql_file != '':
        xtuml.persist_instances(metamodel, dump_sql_file)

    return errors
def main(args):
    """Check one or more persisted xtuml models for association-integrity
    and uniqueness-constraint violations.

    :param args: command-line arguments (filenames plus options)
    :returns: total number of violations found
    """
    parser = optparse.OptionParser(
        usage="%prog [options] <sql_file> [another_sql_file...].",
        version=xtuml.version.complete_string,
        formatter=optparse.TitledHelpFormatter())
    parser.set_description(__doc__.strip())
    parser.add_option("-r", "-R",
                      dest="rel_ids",
                      type='int',
                      metavar="<number>",
                      help="limit consistency check to one or more associations",
                      action="append",
                      default=[])
    parser.add_option("-k",
                      dest="kinds",
                      type='string',
                      metavar="<key letter>",
                      help="limit check for uniqueness constraint violations to one or more classes",
                      action="append",
                      default=[])
    parser.add_option("-v", "--verbosity",
                      dest='verbosity',
                      action="count",
                      help="increase debug logging level",
                      default=1)

    opts, filenames = parser.parse_args(args)
    if not filenames:
        parser.print_help()
        sys.exit(1)

    # map -v count to a logging level, capping at DEBUG
    levels = {
        0: logging.ERROR,
        1: logging.WARNING,
        2: logging.INFO,
        3: logging.DEBUG,
    }
    logging.basicConfig(level=levels.get(opts.verbosity, logging.DEBUG))

    loader = xtuml.ModelLoader()
    for filename in filenames:
        loader.filename_input(filename)
    model = loader.build_metamodel()

    violations = 0
    # check only the requested associations, or all when none were given
    for rel_id in opts.rel_ids:
        violations += xtuml.check_association_integrity(model, rel_id)
    if not opts.rel_ids:
        violations += xtuml.check_association_integrity(model)

    # likewise for uniqueness constraints
    for kind in opts.kinds:
        violations += xtuml.check_uniqueness_constraint(model, kind)
    if not opts.kinds:
        violations += xtuml.check_uniqueness_constraint(model)

    return violations
def test_serialize(self):
    """Serializing, reloading, and serializing again must yield the same
    text (serialization is idempotent)."""
    first = xtuml.serialize(self.m)
    loader = xtuml.ModelLoader()
    loader.input(first)
    second = xtuml.serialize(loader.build_metamodel())
    self.assertEqual(first, second)