Example 1
def main():
    # Parse command line arguments.
    options = parse_args(constants.TOOL_PARSER)

    log.set_verbosity(options.verbosity)

    # Parse each source.
    ast = []
    if options.file:
        for f in options.file:
            s = f.read()
            try:
                ast += parse_to_ast(s, options.cpp, options.cpp_flag)
                if options.resolve_imports:
                    ast, _ = resolve_imports(ast, \
                        os.path.dirname(f.name), options.import_path,
                        options.cpp, options.cpp_flag)
            except CAmkESSyntaxError as e:
                e.set_column(s)
                log.error('%s:%s' % (f.name, str(e)))
                return -1
            except Exception:
                log.exception('Error during lexing/parsing \'%s\'' % f.name)
                return -1
            finally:
                f.close()
    else:
        s = sys.stdin.read()
        try:
            ast += parse_to_ast(s, options.cpp, options.cpp_flag)
            if options.resolve_imports:
                ast, _ = resolve_imports(ast, \
                    os.curdir, options.import_path, options.cpp,
                    options.cpp_flag)
        except Exception:
            log.exception('Error during lexing/parsing')
            return -1

    ast = dedupe(ast)

    if options.resolve_references:
        ast = resolve_references(ast)

    # Generate the output and print it.
    out = show(ast)
    print pretty(out)

    return 0
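
A minimal sketch of the same parse/resolve/pretty-print pipeline driven directly, without the command-line handling above. The import path and the input filename 'spec.camkes' are illustrative assumptions; the functions themselves are the ones main() already uses, called with their defaults as in Example 3.

import os

# Assumed import path for the CAmkES parser helpers used by main() above.
from camkes.parser import (parse_to_ast, resolve_imports, resolve_references,
                           dedupe, show, pretty)

with open('spec.camkes') as f:            # 'spec.camkes' is a hypothetical input
    source = f.read()

ast = parse_to_ast(source)                # lex/parse the source text
ast, _ = resolve_imports(ast, os.curdir)  # inline any imported files
ast = dedupe(ast)                         # drop duplicate items
ast = resolve_references(ast)             # link references to their definitions
print pretty(show(ast))                   # render the AST back to text
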
Example 2
def add_debug_declarations(target_ast, debug_components, target_assembly):
    # Cheat a little by parsing the new debug declarations separately, then
    # adding them in. This saves us having to call object constructors by hand.
    with open(DEBUG_CAMKES) as debug_file:
        camkes_text = debug_file.readlines()
    for i, component in enumerate(debug_components):
        camkes_text.insert(-1, "connection seL4Debug debug%d_delegate(from debug.%s_GDB_delegate, "
                               "to %s.GDB_delegate);" % (i, component, component))
        camkes_text.insert(-1, "connection seL4GDB debug%d(from %s.fault, to debug.%s_fault);"
                               % (i, component, component))
    camkes_text = "\n".join(camkes_text)
    debug_ast = parser.parse_to_ast(camkes_text)
    debug_instances = debug_ast[0].instances
    debug_connections = debug_ast[0].connections
    for assembly in target_ast:
        if assembly == target_assembly:
            for obj in assembly.children():
                if isinstance(obj, ast.Objects.Composition):
                    obj.instances += debug_instances
                    obj.connections += debug_connections
                elif isinstance(obj, ast.Objects.Configuration):
                    serial_irq = ast.Objects.Setting("debug_hw_serial", "irq_attributes", \
                                                     SERIAL_IRQ_NUM)
                    obj.settings.append(serial_irq)
                    serial_attr = ast.Objects.Setting("debug_hw_serial", "serial_attributes",\
                                                      "\"%s\"" % SERIAL_PORTS)
                    obj.settings.append(serial_attr)
    return target_ast
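
For context, this helper is one step of the debug-wrapping flow in Example 12 below; a condensed sketch of the call sequence used there (all of the surrounding helpers are defined in that example):

# Condensed from the main() in Example 12: find the components marked for
# debugging, work out which component types need debug variants, generate
# those variants, then splice the debug instances and connections in.
# target_ast comes from parser.parse_to_ast(...) earlier in that main().
debug_components = get_debug_components(target_ast)
debug_types, target_assembly = get_debug_component_types(target_ast, debug_components)
target_ast = create_debug_component_types(target_ast, debug_types)
target_ast = add_debug_declarations(target_ast, debug_components, target_assembly)
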
Example 3
def main():
    # Input text.
    s = 'assembly { composition { component foo bar; } }'
    print 'Input:\n%s\n' % s

    # 2. Translate your input into an AST.
    ast = camkes.parse_to_ast(s)

    # At this point, ast contains a list of objects whose types are defined in
    # GenericObjects.py, IDLObjects.py and ADLObjects.py. If you want to
    # manipulate the AST you will want to import these.

    # 3. If your input contains import statements that refer to other files,
    # you can use resolve_imports to inline and parse these into your ast.
    ast, _ = camkes.resolve_imports(ast, curdir)

    # 4. If your input contains any references, these will be present in the AST
    # as objects of type GenericObjects.Reference. For example, in the input in
    # this example the component type 'foo' is a reference to a component
    # definition that is expected to be provided elsewhere. After performing
    # reference resolution there may still be references in the AST. This
    # occurs when your references cannot be resolved. For example, in the input
    # here 'foo' is never actually defined.
    ast = camkes.resolve_references(ast, False)

    # 5. If you want to get the AST in an output format, call show(). This
    # accepts the AST itself.
    print 'Output:\n%s\n' % camkes.show(ast)

    # 6. Some output printers implement a pretty printing function to format the
    # output in a human-readable way. Access this with pretty().
    print 'Pretty printed:\n%s\n' % camkes.pretty(camkes.show(ast))

    return 0
Example 4
def main():
    if len(sys.argv) != 2:
        sys.stderr.write('Usage: %s inputfile\n' % sys.argv[0])
        return -1

    with open(sys.argv[1], 'rt') as f:
        s = f.read()

    a = parser.parse_to_ast(s)

    sys.stdout.write('Traverse the AST and print the types of nodes:\n')
    ast.traverse(a, basic_visit, None, None)

    sys.stdout.write('\nNow let\'s try some basic online code generation:\n')
    ast.traverse(a, code_gen_enter, code_gen_exit, {})

    sys.stdout.write('\nHow about the same offline:\n')
    state = {
        'functions':{},
        'infunction':None,
    }
    ast.traverse(a, code_constructor, None, state)
    for k, v in state['functions'].items():
        if v[0] is not None:
            sys.stdout.write('%s ' % v[0])
        else:
            sys.stdout.write('void ')
        sys.stdout.write('%(name)s(%(params)s) {\n  /* hello world */\n}\n' % {
            'name':k,
            'params':', '.join(map(lambda x: '%s %s' % (x.type, x.name), v[1:])),
        })

    return 0
Example 5
def main():
    if len(sys.argv) != 2:
        print >> sys.stderr, 'Usage: %s inputfile' % sys.argv[0]
        return -1

    with open(sys.argv[1], 'r') as f:
        s = f.read()

    a = parser.parse_to_ast(s)

    print 'Traverse the AST and print the types of nodes:'
    ast.traverse(a, basic_visit, None, None)

    print '\nNow let\'s try some basic online code generation:'
    ast.traverse(a, code_gen_enter, code_gen_exit, {})

    print '\nHow about the same offline:'
    state = {
        'functions': {},
        'infunction': None,
    }
    ast.traverse(a, code_constructor, None, state)
    for k, v in state['functions'].items():
        if v[0] is not None:
            print v[0],
        else:
            print 'void ',
        print '%(name)s(%(params)s) {\n  /* hello world */\n}' % {
            'name': k,
            'params': ', '.join(
                map(lambda x: '%s %s' % (x.type, x.name), v[1:])),
        }

    return 0
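
The printing loop in the two examples above implies the shape of state['functions']: each value is a list whose first element is the return type (or None for void) and whose remaining elements are parameter objects carrying .type and .name. A minimal, self-contained sketch with hand-built stand-in values (code_constructor itself is not shown in these examples, so the entries here are hypothetical):

# Hypothetical stand-ins for what code_constructor accumulates during the
# traversal; the real entries come from CAmkES AST nodes.
class Param(object):
    def __init__(self, type, name):
        self.type = type
        self.name = name

state = {
    'functions': {
        'echo_int': ['int', Param('int', 'x')],  # explicit return type
        'ping': [None],                          # None means a void return
    },
    'infunction': None,
}

for k, v in state['functions'].items():
    ret = v[0] if v[0] is not None else 'void'
    params = ', '.join('%s %s' % (p.type, p.name) for p in v[1:])
    print '%s %s(%s) {\n  /* hello world */\n}' % (ret, k, params)
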
Example 6
def main():
    # Input text.
    s = 'assembly { composition { component foo bar; } }'
    sys.stdout.write('Input:\n%s\n\n' % s)

    # 2. Translate your input into an AST.
    ast = camkes.parse_to_ast(s)

    # At this point, ast contains a list of objects whose types are defined in
    # Objects.py. If you want to manipulate the AST you will want to import the
    # AST module.

    # 3. If your input contains import statements that refer to other files,
    # you can use resolve_imports to inline and parse these into your ast.
    ast, _ = camkes.resolve_imports(ast, curdir)

    # 4. If your input contains any references, these will be present in the AST
    # as objects of type camkes.ast.Reference. For example, in the input in
    # this example the component type 'foo' is a reference to a component
    # definition that is expected to be provided elsewhere. After performing
    # reference resolution there may still be references in the AST. This
    # occurs when your references cannot be resolved. For example, in the input
    # here 'foo' is never actually defined.
    ast = camkes.resolve_references(ast, False)

    # 5. If you want to get the AST in an output format, call show(). This
    # accepts the AST itself.
    sys.stdout.write('Output:\n%s\n\n' % camkes.show(ast))

    # 6. Some output printers implement a pretty printing function to format the
    # output in a human-readable way. Access this with pretty().
    sys.stdout.write('Pretty printed:\n%s\n\n' %
                     camkes.pretty(camkes.show(ast)))

    return 0
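
Step 4 above notes that references which cannot be resolved (such as 'foo' here) survive reference resolution. A minimal sketch of surfacing them afterwards, assuming the AST objects expose the unresolved() helper and the filename/lineno/_symbol attributes that the runner in Examples 13 and 14 relies on, and that sys is already imported as in this example:

# Collect any references left unresolved in the AST returned above.
unresolved = set()
for item in ast:
    unresolved = unresolved.union(item.unresolved())
for ref in unresolved:
    sys.stdout.write('unresolved: %s at %s:%s\n' %
                     (ref._symbol, ref.filename or '<unnamed file>', ref.lineno))
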
Example 7
def main():
    p = argparse.ArgumentParser('standalone RPC stub generator')
    p.add_argument('--input',
                   '-i',
                   type=argparse.FileType('r'),
                   default=sys.stdin,
                   help='interface specification')
    p.add_argument('--output',
                   '-o',
                   type=argparse.FileType('w'),
                   default=sys.stdout,
                   help='file to write output to')
    p.add_argument('--procedure',
                   '-p',
                   help='procedure to operate on '
                   '(defaults to the first available)')
    p.add_argument('--template',
                   '-t',
                   type=argparse.FileType('r'),
                   required=True,
                   help='template to use')
    opts = p.parse_args()

    # Parse the input specification.
    try:
        spec = parser.parse_to_ast(opts.input.read())
    except parser.CAmkESSyntaxError as e:
        sys.stderr.write('syntax error: %s\n' % str(e))
        return -1

    # Find the procedure the user asked for. If they didn't explicitly specify
    # one, we just take the first.
    for obj in spec:
        if isinstance(obj, ast.Procedure):
            if opts.procedure is None or opts.procedure == obj.name:
                proc = obj
                break
    else:
        sys.stderr.write('no matching procedure found\n')
        return -1

    # Simplify the CAmkES AST representation into a representation involving
    # strings and simpler objects.
    proc = simplify(proc)

    # Render the template.
    opts.output.write(render(proc, opts.template.read()))

    return 0
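
The parse-and-filter step can also be exercised without the command-line plumbing. A minimal sketch using an inline, made-up procedure definition, where parser and ast are the same CAmkES modules this script already imports:

# Parse an illustrative interface specification and pick out its procedures,
# mirroring the search in main() above.
spec = parser.parse_to_ast('procedure Hello { void say_hello(in string name); };')
procedures = [obj for obj in spec if isinstance(obj, ast.Procedure)]
for p in procedures:
    sys.stdout.write('found procedure: %s\n' % p.name)
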
Example 8
def main():
    args = parse_args(constants.TOOL_LINT)

    log.set_verbosity(args.verbosity)

    # Parse the input and form the AST.
    ast = []
    for f in args.file:
        try:
            items = parser.parse_to_ast(f)
        except Exception as inst:
            log.critical('Failed to parse input: %s' % str(inst))
            return CRITICAL
        if args.resolve_imports:
            try:
                items, _ = parser.resolve_imports(items, \
                    os.path.dirname(f.name), args.import_path)
            except Exception as inst:
                log.critical('Failed to resolve imports: %s' % str(inst))
                return CRITICAL
        ast += items

    if args.resolve_references:
        try:
            ast = parser.resolve_references(ast)
        except Exception as inst:
            log.critical('Failed to resolve references: %s' % str(inst))
            return CRITICAL

    # Check it for inconsistencies.

    ret = 0

    for m in lint.check(ast):
        if isinstance(m, lint.ProblemWarning):
            log.warning(str(m))
            if ret != ERROR:
                ret = WARNING
        else:  # isinstance(m, lint.ProblemError)
            log.error(str(m))
            ret = ERROR

    return ret
Example 9
def main():
    args = parse_args(constants.TOOL_LINT)

    log.set_verbosity(args.verbosity)

    # Parse the input and form the AST.
    ast = []
    for f in args.file:
        try:
            items = parser.parse_to_ast(f)
        except Exception as inst:
            log.critical('Failed to parse input: %s' % str(inst))
            return CRITICAL
        if args.resolve_imports:
            try:
                items, _ = parser.resolve_imports(items, \
                    os.path.dirname(f.name), args.import_path)
            except Exception as inst:
                log.critical('Failed to resolve imports: %s' % str(inst))
                return CRITICAL
        ast += items

    if args.resolve_references:
        try:
            ast = parser.resolve_references(ast, args.allow_forward_references)
        except Exception as inst:
            log.critical('Failed to resolve references: %s' % str(inst))
            return CRITICAL

    # Check it for inconsistencies.

    ret = 0

    for m in lint.check(ast):
        if isinstance(m, lint.ProblemWarning):
            log.warning(str(m))
            if ret != ERROR:
                ret = WARNING
        else: # isinstance(m, lint.ProblemError)
            log.error(str(m))
            ret = ERROR

    return ret
Example 10
def create_debug_component_types(target_ast, debug_types):
    for index, component in enumerate(target_ast):
        if isinstance(component, ast.Objects.Component):
            # Find the debug types and copy the definition
            # Add the necessary interfaces
            if component.name in debug_types:
                debug_component = copy.copy(component)
                debug_component.name = debug_types[component.name]
                # We have to build a new component definition that includes
                # the debug interfaces below.
                new_interface = "uses CAmkES_Debug fault;\n"
                new_interface += "provides CAmkES_Debug GDB_delegate;\n"
                # Get the component as a string and re-parse it with the new interfaces
                string = repr(debug_component).split()
                string[-1:-1] = new_interface.split()
                string = " ".join(string)
                debug_ast = parser.parse_to_ast(string)
                # Replace the debug component
                target_ast[index] = debug_ast[0]
    return target_ast
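
A minimal illustration of the same splice on a hand-written component string (the component text is illustrative, not taken from the original): the extra interface declarations are inserted just before the closing brace at word level, and the result is re-parsed.

# Inject the debug interfaces before the trailing '}' of a component
# definition, then re-parse, mirroring the word-level splice used above.
text = 'component Foo { control; }'
words = text.split()
words[-1:-1] = 'uses CAmkES_Debug fault; provides CAmkES_Debug GDB_delegate;'.split()
debug_ast = parser.parse_to_ast(' '.join(words))
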
Example 11
def main():
    p = argparse.ArgumentParser('standalone RPC stub generator')
    p.add_argument('--input', '-i', type=argparse.FileType('r'),
        default=sys.stdin, help='interface specification')
    p.add_argument('--output', '-o', type=argparse.FileType('w'),
        default=sys.stdout, help='file to write output to')
    p.add_argument('--procedure', '-p', help='procedure to operate on ' \
        '(defaults to the first available)')
    p.add_argument('--template', '-t', type=argparse.FileType('r'),
        required=True, help='template to use')
    opts = p.parse_args()

    # Parse the input specification.
    try:
        spec = parser.parse_to_ast(opts.input.read())
    except parser.CAmkESSyntaxError as e:
        print >>sys.stderr, 'syntax error: %s' % str(e)
        return -1

    # Find the procedure the user asked for. If they didn't explicitly specify
    # one, we just take the first.
    for obj in spec:
        if isinstance(obj, ast.Procedure):
            if opts.procedure is None or opts.procedure == obj.name:
                proc = obj
                break
    else:
        print >>sys.stderr, 'no matching procedure found'
        return -1

    # Simplify the CAmkES AST representation into a representation involving
    # strings and simpler objects.
    proc = simplify(proc)

    # Render the template.
    print >>opts.output, render(proc, opts.template.read())

    return 0
Example 12
def main(argv):
    # Parse input
    vm_mode = False
    try:
        opts, args = getopt.getopt(argv, "cmv:")
    except getopt.GetoptError as err:
        print str(err)
        sys.exit(1)
    if len(args) == 0:
        print "Not enough arguments"
        sys.exit(1)
    elif len(args) > 1:
        print "Too many args"
        sys.exit(1)
    else:
        project_camkes = args[0]
        if not os.path.isfile(APPS_FOLDER + project_camkes):
            print "File not found: %s" % APPS_FOLDER + project_camkes
            sys.exit(1)
    for opt, arg in opts:
        if opt == "-c":
            clean_debug(project_camkes)
            sys.exit(0)
        if opt == "-v":
            vm_mode = True
            vm = arg
    # Open camkes file for parsing
    with open(APPS_FOLDER + project_camkes) as camkes_file:
        lines = camkes_file.readlines()
    # Save any imports so they can be added back in later
    imports = ""
    import_regex = re.compile(r'import .*')
    for line in lines:
        if import_regex.match(line):
            imports += line

    camkes_text = "\n".join(lines)
    # Parse using camkes parser
    camkes_builtin_path = os.path.realpath(__file__ + '/../../camkes/include/builtin')
    include_path = [camkes_builtin_path]
    if vm_mode:
        print "vm mode"
        cpp = True
        config_path = os.path.realpath(__file__ + '/../../../apps/%s/configurations' % vm)
        vm_components_path = os.path.realpath(__file__ + '/../../../apps/%s/../../components/VM' % vm)
        plat_includes = os.path.realpath(__file__ + '/../../../kernel/include/plat/%s' % PLAT)
        cpp_options = ['-DCAMKES_VM_CONFIG=%s' % vm, "-I" + config_path,
                       "-I" + vm_components_path, "-I" + plat_includes]
        include_path.append(os.path.realpath(__file__ + "/../../../projects/vm/components"))
        include_path.append(os.path.realpath(__file__ + "/../../../projects/vm/interfaces"))
    else:
        cpp = False
        cpp_options = []
    target_ast = parser.parse_to_ast(camkes_text, cpp, cpp_options)  
    # Resolve other imports
    project_dir = os.path.dirname(os.path.realpath(APPS_FOLDER + project_camkes)) + "/"
    target_ast, _ = parser.resolve_imports(target_ast, project_dir, include_path, cpp, cpp_options)
    target_ast = parser.resolve_references(target_ast)
    # Find debug components declared in the camkes file
    debug_components = get_debug_components(target_ast)
    # Use the declared debug components to find the types we must generate
    debug_types, target_assembly = get_debug_component_types(target_ast, debug_components)
    # Generate the new types
    target_ast = create_debug_component_types(target_ast, debug_types)
    # Add declarations for the new types
    target_ast = add_debug_declarations(target_ast, debug_components, target_assembly)
    # Get the static definitions needed every time
    debug_definitions = get_debug_definitions()
    # Generate server based on debug components
    debug_definitions += generate_server_component(debug_components)
    # Update makefile with the new debug camkes
    update_makefile(project_camkes, debug_types)
    # Copy the templates into the project directory
    copy_templates(project_camkes)
    # Add our debug definitions
    new_camkes = parser.pretty(parser.show(target_ast))
    # Reparse and rearrange the included code
    procedures = []
    main_ast = []
    new_ast = parser.parse_to_ast(new_camkes, cpp, cpp_options)
    for component in new_ast:
        if isinstance(component, ast.Objects.Procedure):
            procedures.append(component)
        else:
            main_ast.append(component)   
    final_camkes = imports + debug_definitions + parser.pretty(parser.show(procedures + main_ast))
    # Write new camkes file
    with open(APPS_FOLDER + project_camkes + ".dbg", 'w') as f:
        f.write(final_camkes)
    # Write a gdbinit file
    name_regex = re.compile(r"(.*)/")
    search = name_regex.search(project_camkes)
    if debug_components:
        write_gdbinit(search.group(1), debug_components)
    else:
        print "No debug components found"
Example 13
def main():
    options = parse_args(constants.TOOL_RUNNER)

    # Saves us from having to pass the debug flag around everywhere.
    die = functools.partial(_die, options.verbosity >= 3)

    log.set_verbosity(options.verbosity)

    def done(s):
        ret = 0
        if s:
            options.outfile.write(s)
            options.outfile.close()
        sys.exit(ret)

    if not options.platform or options.platform in ('?', 'help') \
            or options.platform not in PLATFORMS:
        die('Valid --platform arguments are %s' % ', '.join(PLATFORMS))

    if not options.file or len(options.file) > 1:
        die('A single input file must be provided for this operation')

    # Construct the compilation cache if requested.
    cache = None
    if options.cache in ('on', 'readonly', 'writeonly'):
        cache = Cache(options.cache_dir)

    f = options.file[0]
    try:
        s = f.read()
        # Try to find this output in the compilation cache if possible. This is
        # one of two places that we check in the cache. This check will 'hit'
        # if the source files representing the input spec are identical to some
        # previous execution.
        if options.cache in ('on', 'readonly'):
            key = [
                version_hash(),
                os.path.abspath(f.name), s,
                cache_relevant_options(options), options.platform, options.item
            ]
            value = cache.get(key)
            assert value is None or isinstance(value, FileSet), \
                'illegally cached a value for %s that is not a FileSet' % options.item
            if value is not None and value.valid():
                # Cache hit.
                log.debug('Retrieved %(platform)s.%(item)s from cache' % \
                    options.__dict__)
                done(value.output)
        ast = parser.parse_to_ast(s, options.cpp, options.cpp_flag,
                                  options.ply_optimise)
        parser.assign_filenames(ast, f.name)
    except parser.CAmkESSyntaxError as e:
        e.set_column(s)
        die('%s:%s' % (f.name, str(e)))
    except Exception as inst:
        die('While parsing \'%s\': %s' % (f.name, inst))

    try:
        for t in AST_TRANSFORMS[PRE_RESOLUTION]:
            ast = t(ast)
    except Exception as inst:
        die('While transforming AST: %s' % str(inst))

    try:
        ast, imported = parser.resolve_imports(ast, \
            os.path.dirname(os.path.abspath(f.name)), options.import_path,
            options.cpp, options.cpp_flag, options.ply_optimise)
    except Exception as inst:
        die('While resolving imports of \'%s\': %s' % (f.name, inst))

    try:
        # if there are multiple assemblies, combine them now
        compose_assemblies(ast)
    except Exception as inst:
        die('While combining assemblies: %s' % str(inst))

    # If we have a readable cache check if our current target is in the cache.
    # The previous check will 'miss' and this one will 'hit' when the input
    # spec is identical to some previous execution modulo a semantically
    # irrelevant element (e.g. an introduced comment). I.e. the previous check
    # matches when the input is exactly the same and this one matches when the
    # AST is unchanged.
    if options.cache in ('on', 'readonly'):
        key = [
            version_hash(), ast,
            cache_relevant_options(options), options.platform, options.item
        ]
        value = cache.get(key)
        if value is not None:
            assert options.item not in NEVER_AST_CACHE, \
                '%s, that is marked \'never cache\' is in your cache' % options.item
            log.debug('Retrieved %(platform)s.%(item)s from cache' % \
                options.__dict__)
            done(value)

    # If we have a writable cache, allow outputs to be saved to it.
    if options.cache in ('on', 'writeonly'):
        orig_ast = deepcopy(ast)
        fs = FileSet(imported)

        def save(item, value):
            # Save an input-keyed cache entry. This one is based on the
            # pre-parsed inputs to save having to derive the AST (parse the
            # input) in order to locate a cache entry in following passes.
            # This corresponds to the first cache check above.
            key = [
                version_hash(),
                os.path.abspath(options.file[0].name), s,
                cache_relevant_options(options), options.platform, item
            ]
            specialised = fs.specialise(value)
            if item == 'capdl':
                specialised.extend(options.elf)
            cache[key] = specialised
            if item not in NEVER_AST_CACHE:
                # Save an AST-keyed cache entry. This corresponds to the second
                # cache check above.
                cache[[
                    version_hash(), orig_ast,
                    cache_relevant_options(options), options.platform, item
                ]] = value
    else:

        def save(item, value):
            pass

    ast = parser.dedupe(ast)
    try:
        ast = parser.resolve_references(ast)
    except Exception as inst:
        die('While resolving references of \'%s\': %s' % (f.name, inst))

    try:
        parser.collapse_references(ast)
    except Exception as inst:
        die('While collapsing references of \'%s\': %s' % (f.name, inst))

    try:
        for t in AST_TRANSFORMS[POST_RESOLUTION]:
            ast = t(ast)
    except Exception as inst:
        die('While transforming AST: %s' % str(inst))

    try:
        resolve_hierarchy(ast)
    except Exception as inst:
        die('While resolving hierarchy: %s' % str(inst))

    # All references in the AST need to be resolved for us to continue.
    unresolved = reduce(lambda a, x: a.union(x),
                        map(lambda x: x.unresolved(), ast), set())
    if unresolved:
        die('Unresolved references in input specification:\n %s' % \
            '\n '.join(map(lambda x: '%(filename)s:%(lineno)s:\'%(name)s\' of type %(type)s' % {
                'filename':x.filename or '<unnamed file>',
                'lineno':x.lineno,
                'name':x._symbol,
                'type':x._type.__name__,
            }, unresolved)))

    # Locate the assembly
    assembly = [x for x in ast if isinstance(x, AST.Assembly)]
    if len(assembly) > 1:
        die('Multiple assemblies found')
    elif len(assembly) == 1:
        assembly = assembly[0]
    else:
        die('No assembly found')

    obj_space = ObjectAllocator()
    obj_space.spec.arch = options.architecture
    cspaces = {}
    pds = {}
    conf = assembly.configuration
    shmem = defaultdict(dict)

    templates = Templates(options.platform)
    map(templates.add_root, options.templates)
    r = Renderer(templates.get_roots(), options)

    # The user may have provided their own connector definitions (with
    # associated templates), in which case they won't be in the built-in lookup
    # dictionary. Let's add them now. Note, definitions here that conflict with
    # existing lookup entries will overwrite the existing entries.
    for c in (x for x in ast if isinstance(x, AST.Connector)):
        if c.from_template:
            templates.add(c.name, 'from.source', c.from_template)
        if c.to_template:
            templates.add(c.name, 'to.source', c.to_template)

    # We're now ready to instantiate the template the user requested, but there
    # are a few wrinkles in the process. Namely,
    #  1. Template instantiation needs to be done in a deterministic order. The
    #     runner is invoked multiple times and template code needs to be
    #     allocated identical cap slots in each run.
    #  2. Components and connections need to be instantiated before any other
    #     templates, regardless of whether they are the ones we are after. Some
    #     other templates, such as the Makefile, depend on the obj_space and
    #     cspaces.
    #  3. All actual code templates, up to the template that was requested,
    #     need to be instantiated. This is related to (1) in that the cap slots
    #     allocated are dependent on what allocations have been done prior to a
    #     given allocation call.

    # Instantiate the per-component source and header files.
    for id, i in enumerate(assembly.composition.instances):
        # Don't generate any code for hardware components.
        if i.type.hardware:
            continue

        if i.address_space not in cspaces:
            p = Perspective(phase=RUNNER,
                            instance=i.name,
                            group=i.address_space)
            cnode = obj_space.alloc(seL4_CapTableObject,
                                    name=p['cnode'],
                                    label=i.address_space)
            cspaces[i.address_space] = CSpaceAllocator(cnode)
            pd = obj_space.alloc(seL4_PageDirectoryObject,
                                 name=p['pd'],
                                 label=i.address_space)
            pds[i.address_space] = pd

        for t in ('%s.source' % i.name, '%s.header' % i.name,
                  '%s.linker' % i.name):
            try:
                template = templates.lookup(t, i)
                g = ''
                if template:
                    g = r.render(i, assembly, template, obj_space, cspaces[i.address_space], \
                        shmem, options=options, id=id, my_pd=pds[i.address_space])
                save(t, g)
                if options.item == t:
                    if not template:
                        log.warning('Warning: no template for %s' %
                                    options.item)
                    done(g)
            except Exception as inst:
                die('While rendering %s: %s' % (i.name, inst))

    # Instantiate the per-connection files.
    conn_dict = {}
    for id, c in enumerate(assembly.composition.connections):
        tmp_name = c.name
        key_from = (c.from_instance.name + '_' +
                    c.from_interface.name) in conn_dict
        key_to = (c.to_instance.name + '_' + c.to_interface.name) in conn_dict
        if not key_from and not key_to:
            # We need a new connection name
            conn_name = 'conn' + str(id)
            c.name = conn_name
            conn_dict[c.from_instance.name + '_' +
                      c.from_interface.name] = conn_name
            conn_dict[c.to_instance.name + '_' +
                      c.to_interface.name] = conn_name
        elif not key_to:
            conn_name = conn_dict[c.from_instance.name + '_' +
                                  c.from_interface.name]
            c.name = conn_name
            conn_dict[c.to_instance.name + '_' +
                      c.to_interface.name] = conn_name
        elif not key_from:
            conn_name = conn_dict[c.to_instance.name + '_' +
                                  c.to_interface.name]
            c.name = conn_name
            conn_dict[c.from_instance.name + '_' +
                      c.from_interface.name] = conn_name
        else:
            continue

        for t in (('%s.from.source' % tmp_name, c.from_instance.address_space),
                  ('%s.from.header' % tmp_name, c.from_instance.address_space),
                  ('%s.to.source' % tmp_name, c.to_instance.address_space),
                  ('%s.to.header' % tmp_name, c.to_instance.address_space)):
            try:
                template = templates.lookup(t[0], c)
                g = ''
                if template:
                    g = r.render(c, assembly, template, obj_space, cspaces[t[1]], \
                        shmem, options=options, id=id, my_pd=pds[t[1]])
                save(t[0], g)
                if options.item == t[0]:
                    if not template:
                        log.warning('Warning: no template for %s' %
                                    options.item)
                    done(g)
            except Exception as inst:
                die('While rendering %s: %s' % (t[0], inst))
        c.name = tmp_name

        # The following block handles instantiations of per-connection
        # templates that are neither a 'source' nor a 'header', as handled
        # above. We assume that none of these need instantiation unless we are
        # actually currently looking for them (== options.item). That is, we
        # assume that following templates, like the CapDL spec, do not require
        # these templates to be rendered prior to themselves.
        # FIXME: This is a pretty ugly way of handling this. It would be nicer
        # for the runner to have a more general notion of per-'thing' templates
        # where the per-component templates, the per-connection template loop
        # above, and this loop could all be done in a single unified control
        # flow.
        for t in (('%s.from.' % c.name, c.from_instance.address_space),
                  ('%s.to.' % c.name, c.to_instance.address_space)):
            if not options.item.startswith(t[0]):
                # This is not the item we're looking for.
                continue
            try:
                # If we've reached here then this is the exact item we're
                # after.
                template = templates.lookup(options.item, c)
                if template is None:
                    raise Exception('no registered template for %s' %
                                    options.item)
                g = r.render(c, assembly, template, obj_space, cspaces[t[1]], \
                    shmem, options=options, id=id, my_pd=pds[t[1]])
                save(options.item, g)
                done(g)
            except Exception as inst:
                die('While rendering %s: %s' % (options.item, inst))

    # Perform any per-component 'simple' generation. This needs to happen last,
    # as this template needs to run after all other capabilities have been
    # allocated.
    for id, i in enumerate(assembly.composition.instances):
        # Don't generate any code for hardware components.
        if i.type.hardware:
            continue
        assert i.address_space in cspaces
        if conf and conf.settings and [x for x in conf.settings if \
                x.instance == i.name and x.attribute == 'simple' and x.value]:
            for t in ('%s.simple' % i.name, ):
                try:
                    template = templates.lookup(t, i)
                    g = ''
                    if template:
                        g = r.render(i, assembly, template, obj_space, cspaces[i.address_space], \
                            shmem, options=options, id=id, my_pd=pds[i.address_space])
                    save(t, g)
                    if options.item == t:
                        if not template:
                            log.warning('Warning: no template for %s' %
                                        options.item)
                        done(g)
                except Exception as inst:
                    die('While rendering %s: %s' % (i.name, inst))

    # Derive a set of usable ELF objects from the filenames we were passed.
    elfs = {}
    for e in options.elf:
        try:
            name = os.path.basename(e)
            if name in elfs:
                raise Exception(
                    'duplicate ELF files of name \'%s\' encountered' % name)
            elf = ELF(e, name, options.architecture)
            p = Perspective(phase=RUNNER, elf_name=name)
            group = p['group']
            # Avoid inferring a TCB as we've already created our own.
            elf_spec = elf.get_spec(infer_tcb=False,
                                    infer_asid=False,
                                    pd=pds[group],
                                    use_large_frames=options.largeframe)
            obj_space.merge(elf_spec, label=group)
            elfs[name] = (e, elf)
        except Exception as inst:
            die('While opening \'%s\': %s' % (e, inst))

    if options.item in ('capdl', 'label-mapping'):
        # It's only relevant to run these filters if the final target is CapDL.
        # Note, this will no longer be true if we add any other templates that
        # depend on a fully formed CapDL spec. Guarding this loop with an if
        # is just an optimisation and the conditional can be removed if
        # desired.
        for f in CAPDL_FILTERS:
            try:
                # Pass everything as named arguments to allow filters to
                # easily ignore what they don't want.
                f(ast=ast,
                  obj_space=obj_space,
                  cspaces=cspaces,
                  elfs=elfs,
                  options=options,
                  shmem=shmem)
            except Exception as inst:
                die('While forming CapDL spec: %s' % str(inst))

    # Instantiate any other, miscellaneous template. If we've reached this
    # point, we know the user did not request a code template.
    try:
        template = templates.lookup(options.item)
        if template:
            g = r.render(assembly, assembly, template, obj_space, None, \
                shmem, imported=imported, options=options)
            save(options.item, g)
            done(g)
    except Exception as inst:
        die('While rendering %s: %s' % (options.item, inst))

    die('No valid element matching --item %s' % options.item)
Example 14
def main():
    options = parse_args(constants.TOOL_RUNNER)

    # Saves us from having to pass the debug flag around everywhere.
    die = functools.partial(_die, options.verbosity >= 3)

    log.set_verbosity(options.verbosity)

    def done(s):
        ret = 0
        if s:
            print >>options.outfile, s
            options.outfile.close()
            if options.post_render_edit and \
                    raw_input('Edit rendered template %s [y/N]? ' % \
                    options.outfile.name) == 'y':
                editor = os.environ.get('EDITOR', 'vim')
                ret = subprocess.call([editor, options.outfile.name])
        sys.exit(ret)

    if not options.platform or options.platform in ['?', 'help'] \
            or options.platform not in PLATFORMS:
        die('Valid --platform arguments are %s' % ', '.join(PLATFORMS))

    if not options.file or len(options.file) > 1:
        die('A single input file must be provided for this operation')

    try:
        profiler = get_profiler(options.profiler, options.profile_log)
    except Exception as inst:
        die('Failed to create profiler: %s' % str(inst))

    # Construct the compilation cache if requested.
    cache = None
    if options.cache in ['on', 'readonly', 'writeonly']:
        cache = Cache(options.cache_dir)

    f = options.file[0]
    try:
        with profiler('Reading input'):
            s = f.read()
        # Try to find this output in the compilation cache if possible. This is
        # one of two places that we check in the cache. This check will 'hit'
        # if the source files representing the input spec are identical to some
        # previous execution.
        if options.cache in ['on', 'readonly']:
            with profiler('Looking for a cached version of this output'):
                key = [version(), os.path.abspath(f.name), s,
                    cache_relevant_options(options), options.platform,
                    options.item]
                value = cache.get(key)
                if value is not None and value.valid():
                    # Cache hit.
                    assert isinstance(value, FileSet), \
                        'illegally cached a value for %s that is not a FileSet' % options.item
                    log.debug('Retrieved %(platform)s.%(item)s from cache' % \
                        options.__dict__)
                    done(value.output)
        with profiler('Parsing input'):
            ast = parser.parse_to_ast(s, options.cpp, options.cpp_flag, options.ply_optimise)
            parser.assign_filenames(ast, f.name)
    except parser.CAmkESSyntaxError as e:
        e.set_column(s)
        die('%s:%s' % (f.name, str(e)))
    except Exception as inst:
        die('While parsing \'%s\': %s' % (f.name, str(inst)))

    try:
        for t in AST_TRANSFORMS[PRE_RESOLUTION]:
            with profiler('Running AST transform %s' % t.__name__):
                ast = t(ast)
    except Exception as inst:
        die('While transforming AST: %s' % str(inst))

    try:
        with profiler('Resolving imports'):
            ast, imported = parser.resolve_imports(ast, \
                os.path.dirname(os.path.abspath(f.name)), options.import_path,
                options.cpp, options.cpp_flag, options.ply_optimise)
    except Exception as inst:
        die('While resolving imports of \'%s\': %s' % (f.name, str(inst)))

    try:
        with profiler('Combining assemblies'):
            # if there are multiple assemblies, combine them now
            compose_assemblies(ast)
    except Exception as inst:
        die('While combining assemblies: %s' % str(inst))

    with profiler('Caching original AST'):
        orig_ast = deepcopy(ast)
    with profiler('Deduping AST'):
        ast = parser.dedupe(ast)
    try:
        with profiler('Resolving references'):
            ast = parser.resolve_references(ast)
    except Exception as inst:
        die('While resolving references of \'%s\': %s' % (f.name, str(inst)))

    try:
        with profiler('Collapsing references'):
            parser.collapse_references(ast)
    except Exception as inst:
        die('While collapsing references of \'%s\': %s' % (f.name, str(inst)))

    try:
        for t in AST_TRANSFORMS[POST_RESOLUTION]:
            with profiler('Running AST transform %s' % t.__name__):
                ast = t(ast)
    except Exception as inst:
        die('While transforming AST: %s' % str(inst))

    try:
        with profiler('Resolving hierarchy'):
            resolve_hierarchy(ast)
    except Exception as inst:
        die('While resolving hierarchy: %s' % str(inst))

    # If we have a readable cache check if our current target is in the cache.
    # The previous check will 'miss' and this one will 'hit' when the input
    # spec is identical to some previous execution modulo a semantically
    # irrelevant element (e.g. an introduced comment). I.e. the previous check
    # matches when the input is exactly the same and this one matches when the
    # AST is unchanged.
    if options.cache in ['on', 'readonly']:
        with profiler('Looking for a cached version of this output'):
            key = [version(), orig_ast, cache_relevant_options(options),
                options.platform, options.item]
            value = cache.get(key)
            if value is not None:
                assert options.item not in NEVER_AST_CACHE, \
                    '%s, that is marked \'never cache\' is in your cache' % options.item
                log.debug('Retrieved %(platform)s.%(item)s from cache' % \
                    options.__dict__)
                done(value)

    # If we have a writable cache, allow outputs to be saved to it.
    if options.cache in ['on', 'writeonly']:
        fs = FileSet(imported)
        def save(item, value):
            # Save an input-keyed cache entry. This one is based on the
            # pre-parsed inputs to save having to derive the AST (parse the
            # input) in order to locate a cache entry in following passes.
            # This corresponds to the first cache check above.
            key = [version(), os.path.abspath(options.file[0].name), s,
                cache_relevant_options(options), options.platform,
                item]
            specialised = fs.specialise(value)
            if item == 'capdl':
                specialised.extend(options.elf or [])
            cache[key] = specialised
            if item not in NEVER_AST_CACHE:
                # Save an AST-keyed cache entry. This corresponds to the second
                # cache check above.
                cache[[version(), orig_ast, cache_relevant_options(options),
                    options.platform, item]] = value
    else:
        def save(item, value):
            pass

    # All references in the AST need to be resolved for us to continue.
    unresolved = reduce(lambda a, x: a.union(x),
        map(lambda x: x.unresolved(), ast), set())
    if unresolved:
        die('Unresolved references in input specification:\n %s' % \
            '\n '.join(map(lambda x: '%(filename)s:%(lineno)s:\'%(name)s\' of type %(type)s' % {
                'filename':x.filename or '<unnamed file>',
                'lineno':x.lineno,
                'name':x._symbol,
                'type':x._type.__name__,
            }, unresolved)))

    # Locate the assembly
    assembly = [x for x in ast if isinstance(x, AST.Assembly)]
    if len(assembly) > 1:
        die('Multiple assemblies found')
    elif len(assembly) == 1:
        assembly = assembly[0]
    else:
        die('No assembly found')

    obj_space = ObjectAllocator()
    cspaces = {}
    pds = {}
    conf = assembly.configuration
    shmem = defaultdict(dict)

    # We need to create a phony instance and connection to cope with cases
    # where the user has not defined any instances or connections (this would
    # be an arguably useless system, but we should still support it). We append
    # these to the template's view of the system below to ensure we always get
    # a usable template dictionary. Note that this doesn't cause any problems
    # because the phony items are named '' and thus unaddressable in ADL.
    dummy_instance = AST.Instance(AST.Reference('', AST.Instance), '')
    dummy_connection = AST.Connection(AST.Reference('', AST.Connector), '', \
        AST.Reference('', AST.Instance), AST.Reference('', AST.Interface), \
        AST.Reference('', AST.Instance), AST.Reference('', AST.Interface))

    templates = Templates(options.platform,
        instance=map(lambda x: x.name, assembly.composition.instances + \
            [dummy_instance]), \
        connection=map(lambda x: x.name, assembly.composition.connections + \
            [dummy_connection]))
    if options.templates:
        templates.add_root(options.templates)
    r = Renderer(templates.get_roots(), options)

    # The user may have provided their own connector definitions (with
    # associated templates), in which case they won't be in the built-in lookup
    # dictionary. Let's add them now. Note, definitions here that conflict with
    # existing lookup entries will overwrite the existing entries.
    for c in [x for x in ast if isinstance(x, AST.Connector)]:
        if c.from_template:
            templates.add(c.name, 'from.source', c.from_template)
        if c.to_template:
            templates.add(c.name, 'to.source', c.to_template)

    # We're now ready to instantiate the template the user requested, but there
    # are a few wrinkles in the process. Namely,
    #  1. Template instantiation needs to be done in a deterministic order. The
    #     runner is invoked multiple times and template code needs to be
    #     allocated identical cap slots in each run.
    #  2. Components and connections need to be instantiated before any other
    #     templates, regardless of whether they are the ones we are after. Some
    #     other templates, such as the Makefile, depend on the obj_space and
    #     cspaces.
    #  3. All actual code templates, up to the template that was requested,
    #     need to be instantiated. This is related to (1) in that the cap slots
    #     allocated are dependent on what allocations have been done prior to a
    #     given allocation call.

    # Instantiate the per-component source and header files.
    for id, i in enumerate(assembly.composition.instances):
        # Don't generate any code for hardware components.
        if i.type.hardware:
            continue

        if i.address_space not in cspaces:
            p = Perspective(phase=RUNNER, instance=i.name,
                group=i.address_space)
            cnode = obj_space.alloc(seL4_CapTableObject,
                name=p['cnode'], label=i.address_space)
            cspaces[i.address_space] = CSpaceAllocator(cnode)
            pd = obj_space.alloc(seL4_PageDirectoryObject, name=p['pd'],
                label=i.address_space)
            pds[i.address_space] = pd

        for t in ['%s.source' % i.name, '%s.header' % i.name,
                '%s.linker' % i.name]:
            try:
                template = templates.lookup(t, i)
                g = ''
                if template:
                    with profiler('Rendering %s' % t):
                        g = r.render(i, assembly, template, obj_space, cspaces[i.address_space], \
                            shmem, options=options, id=id, my_pd=pds[i.address_space])
                save(t, g)
                if options.item == t:
                    if not template:
                        log.warning('Warning: no template for %s' % options.item)
                    done(g)
            except Exception as inst:
                die('While rendering %s: %s' % (i.name, str(inst)))

    # Instantiate the per-connection files.
    conn_dict = {}
    for id, c in enumerate(assembly.composition.connections):
        tmp_name = c.name
        key_from = (c.from_instance.name + '_' + c.from_interface.name) in conn_dict
        key_to = (c.to_instance.name + '_' + c.to_interface.name) in conn_dict
        if not key_from and not key_to:
            # We need a new connection name
            conn_name = 'conn' + str(id)
            c.name = conn_name
            conn_dict[c.from_instance.name + '_' + c.from_interface.name] = conn_name
            conn_dict[c.to_instance.name + '_' + c.to_interface.name] = conn_name
        elif not key_to:
            conn_name = conn_dict[c.from_instance.name + '_' + c.from_interface.name]
            c.name = conn_name
            conn_dict[c.to_instance.name + '_' + c.to_interface.name] = conn_name
        elif not key_from:
            conn_name = conn_dict[c.to_instance.name + '_' + c.to_interface.name]
            c.name = conn_name
            conn_dict[c.from_instance.name + '_' + c.from_interface.name] = conn_name
        else:
            continue

        for t in [('%s.from.source' % tmp_name, c.from_instance.address_space),
                  ('%s.from.header' % tmp_name, c.from_instance.address_space),
                  ('%s.to.source' % tmp_name, c.to_instance.address_space),
                  ('%s.to.header' % tmp_name, c.to_instance.address_space)]:
            try:
                template = templates.lookup(t[0], c)
                g = ''
                if template:
                    with profiler('Rendering %s' % t[0]):
                        g = r.render(c, assembly, template, obj_space, cspaces[t[1]], \
                            shmem, options=options, id=id, my_pd=pds[t[1]])
                save(t[0], g)
                if options.item == t[0]:
                    if not template:
                        log.warning('Warning: no template for %s' % options.item)
                    done(g)
            except Exception as inst:
                die('While rendering %s: %s' % (t[0], str(inst)))
        c.name = tmp_name

        # The following block handles instantiations of per-connection
        # templates that are neither a 'source' nor a 'header', as handled
        # above. We assume that none of these need instantiation unless we are
        # actually currently looking for them (== options.item). That is, we
        # assume that following templates, like the CapDL spec, do not require
        # these templates to be rendered prior to themselves.
        # FIXME: This is a pretty ugly way of handling this. It would be nicer
        # for the runner to have a more general notion of per-'thing' templates
        # where the per-component templates, the per-connection template loop
        # above, and this loop could all be done in a single unified control
        # flow.
        for t in [('%s.from.' % c.name, c.from_instance.address_space),
                  ('%s.to.' % c.name, c.to_instance.address_space)]:
            if not options.item.startswith(t[0]):
                # This is not the item we're looking for.
                continue
            try:
                # If we've reached here then this is the exact item we're
                # after.
                template = templates.lookup(options.item, c)
                if template is None:
                    raise Exception('no registered template for %s' % options.item)
                with profiler('Rendering %s' % options.item):
                    g = r.render(c, assembly, template, obj_space, cspaces[t[1]], \
                        shmem, options=options, id=id, my_pd=pds[t[1]])
                save(options.item, g)
                done(g)
            except Exception as inst:
                die('While rendering %s: %s' % (options.item, str(inst)))

    # Perform any per-component 'simple' generation. This needs to happen last,
    # as this template needs to run after all other capabilities have been
    # allocated.
    for id, i in enumerate(assembly.composition.instances):
        # Don't generate any code for hardware components.
        if i.type.hardware:
            continue
        assert i.address_space in cspaces
        if conf and conf.settings and [x for x in conf.settings if \
                x.instance == i.name and x.attribute == 'simple' and x.value]:
            for t in ['%s.simple' % i.name]:
                try:
                    template = templates.lookup(t, i)
                    g = ''
                    if template:
                        with profiler('Rendering %s' % t):
                            g = r.render(i, assembly, template, obj_space, cspaces[i.address_space], \
                                shmem, options=options, id=id, my_pd=pds[i.address_space])
                    save(t, g)
                    if options.item == t:
                        if not template:
                            log.warning('Warning: no template for %s' % options.item)
                        done(g)
                except Exception as inst:
                    die('While rendering %s: %s' % (i.name, str(inst)))

    # Derive a set of usable ELF objects from the filenames we were passed.
    elfs = {}
    arch = None
    for e in options.elf or []:
        try:
            name = os.path.basename(e)
            if name in elfs:
                raise Exception('duplicate ELF files of name \'%s\' encountered' % name)
            elf = ELF(e, name)
            if not arch:
                # The spec's arch will have defaulted to ARM, but we want it to
                # be the same as whatever ELF format we're parsing.
                arch = elf.get_arch()
                if arch == 'ARM':
                    obj_space.spec.arch = 'arm11'
                elif arch == 'x86':
                    obj_space.spec.arch = 'ia32'
                else:
                    raise NotImplementedError
            else:
                # All ELF files we're parsing should be the same format.
                if arch != elf.get_arch():
                    raise Exception('ELF files are not all the same architecture')
            # Pass 'False' to avoid inferring a TCB as we've already created
            # our own.
            p = Perspective(phase=RUNNER, elf_name=name)
            group = p['group']
            with profiler('Deriving CapDL spec from %s' % e):
                elf_spec = elf.get_spec(infer_tcb=False, infer_asid=False,
                    pd=pds[group], use_large_frames=options.largeframe)
                obj_space.merge(elf_spec, label=group)
            elfs[name] = (e, elf)
        except Exception as inst:
            die('While opening \'%s\': %s' % (e, str(inst)))

    if options.item in ['capdl', 'label-mapping']:
        # It's only relevant to run these filters if the final target is CapDL.
        # Note, this will no longer be true if we add any other templates that
        # depend on a fully formed CapDL spec. Guarding this loop with an if
        # is just an optimisation and the conditional can be removed if
        # desired.
        for f in CAPDL_FILTERS:
            try:
                with profiler('Running CapDL filter %s' % f.__name__):
                    # Pass everything as named arguments to allow filters to
                    # easily ignore what they don't want.
                    f(ast=ast, obj_space=obj_space, cspaces=cspaces, elfs=elfs,
                        profiler=profiler, options=options, shmem=shmem)
            except Exception as inst:
                die('While forming CapDL spec: %s' % str(inst))

    # Instantiate any other, miscellaneous template. If we've reached this
    # point, we know the user did not request a code template.
    try:
        template = templates.lookup(options.item)
        g = ''
        if template:
            with profiler('Rendering %s' % options.item):
                g = r.render(assembly, assembly, template, obj_space, None, \
                    shmem, imported=imported, options=options)
            save(options.item, g)
            done(g)
    except Exception as inst:
        die('While rendering %s: %s' % (options.item, str(inst)))

    die('No valid element matching --item %s' % options.item)