Code example #1
def setUp(self):
    self.m = bridgepoint.load_metamodel()
Code example #2
def run_build(working_directory='.',
              gen_workspace='code_generation',
              output_directory='src',
              variant='c',
              model_inputs=[]):

    # setup build
    print('MC-3020: Setting up build environment...')
    working_directory = os.path.abspath(
        working_directory)  # resolve working directory path
    gen_workspace = os.path.abspath(
        gen_workspace)  # resolve gen workspace path
    os.environ['ROX_MC_ARC_DIR'] = os.path.join(
        gen_workspace, 'arc')  # set archetype directory
    output_directory = os.path.abspath(output_directory)  # resolve output path
    if not os.path.exists(output_directory):
        os.makedirs(output_directory)

    # prepare gen workspace
    print('MC-3020: Preparing generation workspace...')
    if os.path.exists(gen_workspace):
        shutil.rmtree(gen_workspace)
    if not os.path.exists(gen_workspace):
        os.makedirs(gen_workspace)

    # copy archetypes
    print('MC-3020: Installing model compiler archetypes...')
    if not os.path.exists(os.path.join(gen_workspace, 'arc')):
        os.makedirs(os.path.join(gen_workspace, 'arc'))
    for arcfile in filter(
            lambda path: not os.path.isdir(os.path.join(ARCDIR, path)),
            os.listdir(ARCDIR)):
        shutil.copyfile(os.path.join(ARCDIR, arcfile),
                        os.path.join(gen_workspace, 'arc', arcfile))
    if os.path.exists(os.path.join(ARCDIR, variant)) and os.path.isdir(
            os.path.join(ARCDIR, variant)):
        for arcfile in filter(
                lambda path: not os.path.isdir(
                    os.path.join(ARCDIR, variant, path)),
                os.listdir(os.path.join(ARCDIR, variant))):
            shutil.copyfile(os.path.join(ARCDIR, variant, arcfile),
                            os.path.join(gen_workspace, 'arc', arcfile))

    # copy marking files
    print('MC-3020: Installing user marks...')
    for markfile in filter(lambda path: os.path.splitext(path)[1] == '.mark',
                           os.listdir(os.path.join(SCHEMADIR, 'colors'))):
        shutil.copyfile(os.path.join(SCHEMADIR, 'colors', markfile),
                        os.path.join(gen_workspace, markfile))
    shutil.copyfile(os.path.join(SCHEMADIR, 'colors', 'sys_functions.arc'),
                    os.path.join(gen_workspace, 'sys_functions.arc'))
    for user_markfile in filter(
            lambda path: os.path.splitext(path)[1] == '.mark',
            os.listdir(working_directory)):
        shutil.copyfile(os.path.join(working_directory, user_markfile),
                        os.path.join(gen_workspace, user_markfile))

    # execute pre-build
    print('MC-3020: Pre-building...')
    model = bridgepoint.load_metamodel(model_inputs)
    bridgepoint.prebuild_model(model)
    xtuml.persist_instances(model, os.path.join(gen_workspace, 'a.sql'))

    # execute code generation
    print('MC-3020: Generating code...')
    os.chdir(gen_workspace)
    id_generator = xtuml.IntegerGenerator()
    model = xtuml.MetaModel(id_generator)
    loader = xtuml.ModelLoader()
    loader.filename_input(os.path.join(SCHEMADIR, 'sql', 'xtumlmc_schema.sql'))
    loader.filename_input(os.path.join(gen_workspace, 'a.sql'))
    loader.populate(model)
    rt = rsl.Runtime(model, 'change', True, None)
    ast = rsl.parse_file(os.path.join(gen_workspace, 'arc', 'sys.arc'))
    rsl.evaluate(rt, ast, ['.'])

    # copy generated sources to output directory
    print('MC-3020: Installing generated sources...')
    custom_implementations = []
    try:
        with open(os.path.join(working_directory, 'custom.txt')) as f:
            custom_implementations = list(
                map(lambda s: s.strip(), f.readlines()))
    except IOError:  # custom.txt is optional
        print('MC-3020: No custom sources found...')
    for src_file in filter(
            lambda path: os.path.splitext(path)[1] in ['.h', '.c'],
            os.listdir(os.path.join(gen_workspace, '_ch'))):
        if src_file in custom_implementations:
            shutil.copyfile(os.path.join(gen_workspace, '_ch', src_file),
                            os.path.join(output_directory, src_file + '.orig'))
        else:
            shutil.copyfile(os.path.join(gen_workspace, '_ch', src_file),
                            os.path.join(output_directory, src_file))

    print('MC-3020: Done.')
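
The run_build function above is an excerpt: it relies on module-level imports and on two constants, ARCDIR and SCHEMADIR, that are defined elsewhere in the project. A minimal, hypothetical surrounding module is sketched below; the directory values and the model path are placeholders, not taken from the original source.

# Hypothetical context for run_build(); the ARCDIR/SCHEMADIR values and the
# model path are placeholders for illustration only.
import os
import shutil

import rsl
import xtuml
import bridgepoint

ARCDIR = '/opt/mc3020/arc'        # assumed location of the MC-3020 archetypes
SCHEMADIR = '/opt/mc3020/schema'  # assumed location of the schema and colors files

if __name__ == '__main__':
    run_build(working_directory='.',
              gen_workspace='code_generation',
              output_directory='src',
              variant='c',
              model_inputs=['models/my_component'])  # BridgePoint model folder (placeholder)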
Code example #3
File: test_sort.py  Project: cortlandstarrett/pyxtuml
def setUp(self):
    self.m = bridgepoint.load_metamodel()
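
This setUp is a method of a unittest.TestCase. A minimal, self-contained sketch of such a test class is shown below; the class name and the assertion are illustrative, not taken from test_sort.py.

# Illustrative test case around the setUp shown above; the test body is an example only.
import unittest

import bridgepoint


class TestMetamodel(unittest.TestCase):

    def setUp(self):
        # create a fresh metamodel instance before each test
        self.m = bridgepoint.load_metamodel()

    def test_metamodel_loaded(self):
        self.assertIsNotNone(self.m)


if __name__ == '__main__':
    unittest.main()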
Code example #4
def main():
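    # parse command-line options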
    parser = optparse.OptionParser(usage="%prog [options] <model_path> [another_model_path...]",
                                   formatter=optparse.TitledHelpFormatter())
                                   
    parser.set_description(__doc__.strip())
    
    parser.add_option("-o", "--output", dest='output', metavar="PATH",
                      help="save sql model instances to PATH",
                      action="store", default='/dev/stdout')
    
    parser.add_option("-v", "--verbosity", dest='verbosity', action="count",
                      help="increase debug logging level", default=2)
    
    (opts, args) = parser.parse_args()
    if len(args) == 0 or None in [opts.output]:
        parser.print_help()
        sys.exit(1)

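    # map the -v count onto a logging level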
    levels = {
              0: logging.ERROR,
              1: logging.WARNING,
              2: logging.INFO,
              3: logging.DEBUG,
    }
    logging.basicConfig(level=levels.get(opts.verbosity, logging.DEBUG))
    
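    # load the input model(s) and create an empty ooaofooa model for the generated package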
    m1 = bridgepoint.load_metamodel(args, load_globals=True)
    m2 = ooaofooa.empty_model()
    
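    # look up the core data types used for parameter and return types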
    dt_boolean = m2.select_any('S_DT', where(Name='boolean'))
    dt_integer = m2.select_any('S_DT', where(Name='integer'))
    dt_real = m2.select_any('S_DT', where(Name='real'))
    dt_string = m2.select_any('S_DT', where(Name='string'))
    dt_unique_id = m2.select_any('S_DT', where(Name='unique_id'))
    dt_void = m2.select_any('S_DT', where(Name='void'))
    
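    # create a package to hold the generated functions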
    pe_pe = m2.new('PE_PE', Visibility=True, type=7)
    ep_pkg = m2.new('EP_PKG', Name='SQL_Instance_Stream_Provider')
    relate(pe_pe, ep_pkg, 8001)
    
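    # add one synchronous function per stream operation
    # (mk_function, mk_parameters and the gen_* body generators are helpers not shown in this excerpt)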
    s_sync = mk_function(ep_pkg, Name='SQL_Instance_Stream_Connect',
                         Action_Semantics_internal=gen_connect(m1))
    mk_parameters(s_sync, from_id=dt_unique_id, from_key_letter=dt_string,
                  to_id=dt_unique_id, to_key_letter=dt_string, rel_id=dt_string)
    
    s_sync = mk_function(ep_pkg, Name='SQL_Instance_Stream_Connect_Using',
                         Action_Semantics_internal=gen_connect_using(m1))
    mk_parameters(s_sync, from_id=dt_unique_id, from_key_letter=dt_string,
                  to_id=dt_unique_id, to_key_letter=dt_string, 
                  using_id=dt_unique_id, rel_id=dt_string)
    
    s_sync = mk_function(ep_pkg, Name='SQL_Instance_Stream_New',
                         Action_Semantics_internal=gen_new(m1))
    mk_parameters(s_sync, key_letter=dt_string)
    unrelate(s_sync, dt_void, 25)
    relate(s_sync, dt_unique_id, 25)
    
    s_sync = mk_function(ep_pkg, Name='SQL_Instance_Stream_Set_Boolean',
                         Action_Semantics_internal=gen_set_boolean(m1))
    mk_parameters(s_sync, key_letter=dt_string, instance_id=dt_unique_id,
                  name=dt_string, value=dt_boolean)
    
    s_sync = mk_function(ep_pkg, Name='SQL_Instance_Stream_Set_Integer',
                         Action_Semantics_internal=gen_set_integer(m1))
    mk_parameters(s_sync, key_letter=dt_string, instance_id=dt_unique_id,
                  name=dt_string, value=dt_integer)
    
    s_sync = mk_function(ep_pkg, Name='SQL_Instance_Stream_Set_Real',
                         Action_Semantics_internal=gen_set_real(m1))
    mk_parameters(s_sync, key_letter=dt_string, instance_id=dt_unique_id,
                  name=dt_string, value=dt_real)
    
    s_sync = mk_function(ep_pkg, Name='SQL_Instance_Stream_Set_String',
                         Action_Semantics_internal=gen_set_string(m1))
    mk_parameters(s_sync, key_letter=dt_string, instance_id=dt_unique_id,
                  name=dt_string, value=dt_string)
    
    s_sync = mk_function(ep_pkg, Name='SQL_Instance_Stream_Set_Unique_Id',
                         Action_Semantics_internal=gen_set_unique_id(m1))
    mk_parameters(s_sync, key_letter=dt_string, instance_id=dt_unique_id,
                  name=dt_string, value=dt_unique_id)
    
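    # remove the globally defined elements before serializing the new package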
    ooaofooa.delete_globals(m2)
    
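    # write a BridgePoint stream header followed by the serialized instances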
    with open(opts.output, 'w') as f:
        f.write('-- root-types-contained: Package_c\n')
        f.write('-- generics\n')
        f.write('-- BP 7.1 content: StreamData syschar: 3 persistence-version: 7.1.6\n')
        f.write(xtuml.serialize_instances(m2))
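
Run as a script, main() expects one or more BridgePoint model paths plus the options it declares. An illustrative way to drive it from Python is sketched below; the script name, output file, and model path are placeholders.

# Illustrative invocation of main(); all paths below are placeholders.
import sys

sys.argv = ['gen_stream_provider.py',
            '--output', 'stream_provider.xtuml',  # where the serialized instances are written
            '-v',                                  # one extra -v raises the log level from INFO to DEBUG
            'models/my_component']                 # BridgePoint model path to load
main()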