Example #1
    def __init__(self, ip, port):
        self.ip = ip
        self.port = port
        self.buffsize = 2048**2
        self.main_socket = s.socket(s.AF_INET, s.SOCK_STREAM)  # Create TCP socket
        ###
        self.main_socket.setsockopt(s.SOL_SOCKET, s.SO_REUSEADDR, 1)  # enable SO_REUSEADDR (option value is a flag, not the port)
        ###
        self.main_socket.bind((self.ip, self.port))
        self.main_socket.listen(5)
        self.server_is_open = True  # is the game server on?

        self.games = []  # list of on-going games
        self.names = {}  # dictionary of client and his game name
        self.timers = {}  # dictionary of each game and its time state (True/False)  {Game:T/F}
        self.question_start_time = {}  # dictionary of {Game:current_question_start_time}
        self.wall_displays = {}  # dictionary of all wall display sockets and their games {socket:Game}
        self.next_question_request = []  # list of wall displays that requested a next question (for a specific round)
        self.validated = {}  # dictionary of clients and their validated games
        self.validated_login = {}  # dictionary of validated username and password clients, values: T/number of failed logins

        self.messages = {}  # {socket:[messages],...}

        self.inputs = [self.main_socket]
        self.outputs = []
        self.connected = {}  # dictionary of connected clients (including clients not in games) {clientobj:(Game, player_name),...}

        self.m = Module()  # module object for database interaction
Example #2
def insertDeviceModules(moduleList, annotateParentsOnly=False):
     
    elabAreaConstraints = AreaConstraints(moduleList)
    # this was constructed upon the original call to load area.
    elabAreaConstraints.loadAreaConstraintsElaborated()

    for userAreaGroup in elabAreaConstraints.constraints.values():
  
        if('SYNTH_BOUNDARY' in userAreaGroup.attributes):  
             # Modify parent to know about this child.               
             parentModule = moduleList.modules[userAreaGroup.parentName]
             # pick up deps from parent. 
             moduleDeps = {}
             moduleName = userAreaGroup.attributes['MODULE_NAME']

             # grab the parent module verilog and convert it. This
             # is really ugly, and demonstrates why first-class
             # language constructs are so nice.  Eventually, we
             # should push these new synth boundary objects into
             # flow earlier.
             moduleVerilog = None
             for dep in map(functools.partial(bsv_tool.modify_path_ba, moduleList), model.convertDependencies(moduleList.getAllDependenciesWithPaths('GEN_VERILOGS'))):
                 if (re.search(moduleName, dep)):
                     moduleVerilog = dep  
                  

             if(moduleVerilog is None):
                 print "ERROR: failed to find verilog for area group: " + userAreaGroup.name 
                 exit(1)
        
             moduleVerilogBlackBox = moduleVerilog.replace('.v', '_stub.v')

             moduleDeps['GEN_VERILOG_STUB'] = [moduleVerilogBlackBox]

             # We need to ensure that the second pass glue logic
             # modules don't look at the black box stubs.  The modules
             # are in the current synth boundaries list, but not in the LI graph.
             parentList = [parentModule, moduleList.topModule] + [module for module in moduleList.synthBoundaries() if not module.name in elabAreaConstraints.constraints]
             
             for parent in parentList:
                 print "BLACK_BOX Annotating: " + parent.name
                 if(parent.getAttribute('BLACK_BOX') is None):
                     parent.putAttribute('BLACK_BOX', {moduleVerilog: moduleVerilogBlackBox})
                 else:
                     blackBoxDict = parent.getAttribute('BLACK_BOX') 
                     blackBoxDict[moduleVerilog] = moduleVerilogBlackBox

             if(not annotateParentsOnly):            
                 moduleList.env.Command([moduleVerilogBlackBox], [moduleVerilog],
                                       'leap-gen-black-box -nohash $SOURCE > $TARGET')


                 m = Module(userAreaGroup.name, [moduleName],\
                             parentModule.buildPath, parentModule.name,\
                             [], parentModule.name, [], moduleDeps)
                 m.putAttribute("WRAPPER_NAME", moduleName)
                 m.putAttribute("AREA_GROUP", 1)
                 
                 moduleList.insertModule(m)
Example #3
 def __init__(self):
     # constructor
     # the model inherits from the Module class
     Module.__init__(self)
     # the model is composed of linear layers with activation layers in between
     self.l1 = Sequential(Linear(2, 16), ReLu(), Linear(16, 92))
     self.s1 = TanhS()
     self.l2 = Linear(92, 2)
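For reference, the same stack expressed with standard torch.nn layers (a hedged equivalent sketch; the snippet itself uses a custom mini-framework, and TanhS is assumed to behave like a plain Tanh):

import torch
from torch import nn

# 2 -> 16 -> ReLU -> 92 -> Tanh -> 2, mirroring l1, s1 and l2 above
model = nn.Sequential(
    nn.Linear(2, 16),
    nn.ReLU(),
    nn.Linear(16, 92),
    nn.Tanh(),          # stand-in for the custom TanhS activation
    nn.Linear(92, 2),
)
out = model(torch.randn(8, 2))  # forward pass on a batch of 8 two-feature samples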
Example #4
def main():
    opts = optparser.parse_args()[0]

    train_loader = Loader(opts.train)

    opts.vocab_len = len(train_loader._char_to_id)
    opts.pos_len = len(train_loader._pos_to_id)
    opts.max_pos_len = train_loader._pos_max_len
    opts.max_target_len = train_loader._char_max_len
    opts.use_cuda = opts.use_cuda == 1
    opts.eval = opts.eval == 1
    opts.data_size = train_loader.get_data_size()

    if not torch.cuda.is_available():
        opts.use_cuda = False
    torch.manual_seed(opts.seed)
    np.random.seed(opts.seed)

    if not opts.eval:
        # weights for paddings, set to 0
        loss_weights = torch.ones(opts.vocab_len)
        loss_weights[0] = 0
        criterion = nn.NLLLoss(loss_weights, size_average=False)

        c2i, i2c, p2i, i2p = train_loader.get_mappings()
        dev_loader = Loader(opts.dev, c2i, i2c, p2i, i2p)
        if dev_loader._pos_max_len > opts.max_pos_len:
            opts.max_pos_len = dev_loader._pos_max_len

        model = Module(opts)
        if opts.model_path != '':
            model = torch.load(opts.model_path)

        train_batcher = Batcher(opts.batch_size, train_loader.get_data(),
                                opts.max_pos_len, opts.eval)

        dev_batcher = Batcher(decode_batch, dev_loader.get_data(),
                              opts.max_pos_len, True)

        print model
        start_train(model, criterion, opts, train_batcher, dev_batcher)
    else:
        model = torch.load(opts.model_path)
        model.eval()
        #print model

        c2i, i2c, p2i, i2p = train_loader.get_mappings()

        test_loader = Loader(opts.test, c2i, i2c, p2i, i2p)
        if test_loader._pos_max_len > opts.max_pos_len:
            opts.max_pos_len = test_loader._pos_max_len
        test_batcher = Batcher(1, test_loader.get_data(), opts.max_pos_len,
                               opts.eval)

        opts.data_size = test_loader.get_data_size()
        decode(model, opts, test_batcher, i2c, i2p)
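main() reads a module-level optparser whose definition is not shown; a minimal sketch of option declarations that would supply the attributes used above (flag names and defaults are assumptions):

import optparse

optparser = optparse.OptionParser()
optparser.add_option('--train', dest='train', help='path to training data')
optparser.add_option('--dev', dest='dev', help='path to dev data')
optparser.add_option('--test', dest='test', help='path to test data')
optparser.add_option('--model_path', dest='model_path', default='', help='path to a saved model')
optparser.add_option('--batch_size', dest='batch_size', type='int', default=32)
optparser.add_option('--use_cuda', dest='use_cuda', type='int', default=0)
optparser.add_option('--eval', dest='eval', type='int', default=0)
optparser.add_option('--seed', dest='seed', type='int', default=1)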
Example #5
def load_modules():
    """Load modules data to database."""

    print "Modules"

    # delete modules before data gets added to avoid duplicate info
    # Module.query.delete()

    # insert data from seed_module
    with open("seed_data/seed_module") as module_data:
        for row in module_data:
            name, description, additional_info, user_id = row.rstrip().split(
                "|")
            module = Module(name=name,
                            description=description,
                            additional_info=additional_info,
                            user_id=user_id)

            # add module to session
            db.session.add(module)

    # commit changes
    db.session.commit()

    print "Modules loaded."
Example #6
def add_modules(username):
    """Add function/module information"""

    if not verify_user(username):
        return redirect("/login")

    mname = request.form.get("mname")
    mdesc = request.form.get("mdesc")
    maddinfo = request.form.get("maddinfo")
    fname = request.form.get("fname")
    fdesc = request.form.get("fdesc")
    faddinfo = request.form.get("faddinfo")
    samplecode = request.form.get("samplecode")
    output = request.form.get("output")

    if fname == "":
        flash("Please input a function name.")
        return redirect("/{}/addmodules".format(username))

    # fetch user to get user_id
    user = User.query.filter_by(username=username).first()

    existing_mod = Module.query.filter(
        (Module.user_id == user.user_id) | (Module.user_id == 1),
        Module.name == mname).first()

    if mname == "":
        module = Module.query.filter_by(module_id=1).first()
    elif existing_mod:
        module = existing_mod
    else:
        module = Module(name=mname,
                        description=mdesc,
                        additional_info=maddinfo,
                        user_id=user.user_id)

        db.session.add(module)
        db.session.commit()

    function = Function(name=fname,
                        description=fdesc,
                        additional_info=faddinfo,
                        sample_code=samplecode,
                        output=output,
                        user_id=user.user_id,
                        module_id=module.module_id)

    db.session.add(function)
    db.session.commit()

    flash("Your notes have been added.")

    return redirect("/{}/studynotes".format(username))
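The snippet shows only the view body; the route registration is not included, so a plausible decoration (an assumption inferred from the redirect target) that would sit directly above the def is:

@app.route("/<username>/addmodules", methods=["POST"])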
Example #7
    def __getitem__(self, idx):
        return (self.x[idx], self.t[idx], idx)


# ===================================================================================================== #
batch_size = {'train': 32, 'valid': 32}
dataloader = {
    phase:
    torch.utils.data.DataLoader(dataset=SrDataset(phase, dire, width, height),
                                batch_size=batch_size[phase],
                                shuffle=False)
    for phase in ['valid']
}

use_gpu = torch.cuda.is_available()
module = Module()
module.load_state_dict(torch.load(pretrained))

fid = open('parameters', 'wb+')
for param in module.parameters():
    b = param.data.numpy()
    fid.write(b)
fid.close()
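# A possible readback sketch (assumes the saved parameters were float32):
#   import numpy as np
#   params = np.fromfile('parameters', dtype=np.float32)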

if use_gpu:
    module.cuda()
    module = nn.DataParallel(module, gpu)

for stage in ([0] * 1):
    #   for epoch in range(1):
    for phase in ["valid"]:
Example #8
  def __init__(self, moduleList):
    TMP_BSC_DIR = moduleList.env['DEFS']['TMP_BSC_DIR']
    topModulePath = get_build_path(moduleList, moduleList.topModule)
    # The LIM compiler uniquifies synthesis boundary names  
    uidOffset = int(moduleList.getAWBParam('wrapper_gen_tool', 'MODULE_UID_OFFSET'))

    # We only inject the platform wrapper in first pass builds.  In
    # the second pass, we import the first pass object code.  It may
    # be that we need this code?

    # Inject a synth boundary for platform build code.  we need to
    # pick up some dependencies from the top level code.  this is a
    # pretty major hack, in my opinion. Better would be to actually
    # inspect the eventual .ba files for their dependencies. 
    platformName = moduleList.localPlatformName + '_platform'
    platformDeps = {}
    platformDeps['GEN_VERILOGS'] = []
    platformDeps['GEN_BAS'] = [] #moduleList.getSynthBoundaryDependencies(moduleList.topModule, 'GEN_BAS')                               
    platformDeps['GEN_VPI_HS'] = moduleList.getSynthBoundaryDependencies(moduleList.topModule, 'GEN_VPI_HS')                               
    platformDeps['GEN_VPI_CS'] = moduleList.getSynthBoundaryDependencies(moduleList.topModule, 'GEN_VPI_CS')                          
     
    #This is sort of a hack.

    platformDeps['GIVEN_BSVS'] = []
    platformDeps['WRAPPER_BSHS'] = ['awb/provides/virtual_platform.bsh', 'awb/provides/physical_platform.bsh']
    platformDeps['BA'] = []
    platformDeps['STR'] = []
    platformDeps['VERILOG'] = [topModulePath + '/' + TMP_BSC_DIR + '/mk_' + platformName + '_Wrapper.v']
    platformDeps['BSV_LOG'] = []
    platformDeps['VERILOG_STUB'] = []
       
    platform_module = Module( platformName, ["mkVirtualPlatform"], moduleList.topModule.buildPath,\
                          moduleList.topModule.name,\
                          [], moduleList.topModule.name, [], platformDeps, platformModule=True)

    platform_module.dependsFile = '.depends-platform'
    platform_module.interfaceType = 'VIRTUAL_PLATFORM'
    platform_module.extraImports = ['virtual_platform']

    first_pass_LI_graph = getFirstPassLIGraph()
    if(first_pass_LI_graph is None):
        moduleList.insertModule(platform_module)
        moduleList.graphize()
        moduleList.graphizeSynth()

        # Sprinkle more files expected by the two-pass build.  
        generateWrapperStub(moduleList, platform_module)
        generateAWBCompileWrapper(moduleList, platform_module)

    else:
        platform_module_li = first_pass_LI_graph.modules[moduleList.localPlatformName + '_platform']

        # This gives us the right path. 
        synthHandle = getSynthHandle(moduleList, platform_module)

        # throw in some includes...
        synthHandle.write('import HList::*;\n')
        synthHandle.write('import Vector::*;\n')
        synthHandle.write('import ModuleContext::*;\n')
        synthHandle.write('import GetPut::*;\n')
        synthHandle.write('import Clocks::*;\n')
        synthHandle.write('`include "awb/provides/virtual_platform.bsh"\n')
        synthHandle.write('`include "awb/provides/physical_platform.bsh"\n')
        generateWellKnownIncludes(synthHandle)
        # May need an extra import here?
        # get the platform module from the LIGraph            
        generateBAImport(platform_module_li, synthHandle)
        # include synth stub here....
        _emitSynthModule(platform_module_li, synthHandle, platform_module.interfaceType,
                         localPlatformName = moduleList.localPlatformName)

    ## Here we use a module list sorted alphabetically in order to guarantee
    ## the generated wrapper files are consistent.  The topological sort
    ## guarantees only a depth first traversal -- not the same traversal
    ## each time.
    synth_modules = [moduleList.topModule] + moduleList.synthBoundaries()

    ## Models have the option of declaring top-level clocks that will
    ## be exposed as arguments.  When top-level clocks exist a single
    ## top-level reset is also defined.  To request no top-level clocks
    ## the variable N_TOP_LEVEL_CLOCKS should be removed from a platform's
    ## AWB configuration file, since Bluespec can't test the value of
    ## a preprocessor variable.
    try:
      n_top_clocks = int(moduleList.getAWBParam('physical_platform', 'N_TOP_LEVEL_CLOCKS'))
      if (n_top_clocks == 0):
        sys.stderr.write("Error: N_TOP_LEVEL_CLOCKS may not be 0 due to Bluespec preprocessor\n")
        sys.stderr.write("       limitations.  To eliminate top-level clocks, remove the AWB\n")
        sys.stderr.write("       parameter from the platform configuration.\n")
        sys.exit(1)
    except:
      n_top_clocks = 0

    for module in synth_modules:
      modPath = moduleList.env['DEFS']['ROOT_DIR_HW'] + '/' + module.buildPath + '/' + module.name
      wrapperPath =  modPath + "_Wrapper.bsv"
      logPath = modPath + "_Log.bsv"

      conSizePath =  modPath + "_Wrapper_con_size.bsh"
      ignorePath = moduleList.env['DEFS']['ROOT_DIR_HW'] + '/' + module.buildPath + '/.ignore'

      # clear out code on clean.
      if moduleList.env.GetOption('clean'):
          os.system('rm -f ' + wrapperPath)
          os.system('rm -f ' + logPath)
          os.system('rm -f ' + conSizePath)
          os.system('rm -f ' + ignorePath)
          if (module.name != moduleList.topModule.name):
              os.system('rm -f ' + modPath + '.bsv') 
          continue

      if (model.getBuildPipelineDebug(moduleList) != 0):
        print "Wrapper path is " + wrapperPath
      wrapper_bsv = open(wrapperPath, 'w')

      ignore_bsv = open(ignorePath, 'w')
      ignore_bsv.write("// Generated by wrapper_gen.py\n\n")

      # Connection size doesn't appear on the first dependence pass, since it
      # doesn't exist until after the first build.  Finding it later results in
      # build dependence changes and rebuilding.  Ignore it, since the file will
      # change only when some other file changes.
      ignore_bsv.write(conSizePath)
      ignore_bsv.close()

      # Generate a dummy connection size file to avoid errors during dependence
      # analysis.
      if not os.path.exists(conSizePath):
          dummyModule = LIModule(module.name, module.name)
          bsh_handle = open(conSizePath, 'w')
          generateConnectionBSH(dummyModule, bsh_handle)
          bsh_handle.close()

      wrapper_bsv.write('import HList::*;\n')
      wrapper_bsv.write('import Vector::*;\n')
      wrapper_bsv.write('import ModuleContext::*;\n')
      # the top module is handled specially
      if (module.name == moduleList.topModule.name):
        generateWellKnownIncludes(wrapper_bsv)
        wrapper_bsv.write('// These are well-known/required leap modules\n')
        wrapper_bsv.write('// import non-synthesis public files\n')

        # Include all subordinate synthesis boundaries for use by
        # instantiateAllSynthBoundaries() below.
        # If we're doing a LIM build, there are no *_synth.bsv files for user code.
        # Probably if we're doing a build tree they aren't necessary either, but
        # removing those dependencies would take a little work. 
        if(first_pass_LI_graph is None):
            for synth in synth_modules:
                if synth != module:
                    wrapper_bsv.write('`include "' + synth.name + '_synth.bsv"\n')

        # Provide a method that imports all subordinate synthesis
        # boundaries.  It will be invoked inside the top level model
        # in order to build all soft connections
        use_build_tree = moduleList.getAWBParam('wrapper_gen_tool', 'USE_BUILD_TREE')
        expose_all_connections = 0
        try:
            expose_all_connections = moduleList.getAWBParam('model', 'EXPOSE_ALL_CONNECTIONS')
        except:
            pass

        if (use_build_tree == 1):
            wrapper_bsv.write('\n\n`ifdef  CONNECTION_SIZES_KNOWN\n');
            # build_tree.bsv will get generated later, during the
            # leap-connect phase.
            wrapper_bsv.write('    import build_tree_synth::*;\n'); 
            wrapper_bsv.write('    module [Connected_Module] instantiateAllSynthBoundaries#(Reset baseReset) ();\n')
            wrapper_bsv.write('        Reset rst <- mkResetFanout(baseReset);\n')
            wrapper_bsv.write('        let m <- build_tree(baseReset, reset_by rst);\n')
            wrapper_bsv.write('    endmodule\n')
            wrapper_bsv.write('`else\n');

        wrapper_bsv.write('\n    module ')
        if len(synth_modules) != 1:
            wrapper_bsv.write('[Connected_Module]')
        wrapper_bsv.write(' instantiateAllSynthBoundaries#(Reset baseReset) ();\n')

        for synth in synth_modules:
          if synth != module and not synth.platformModule:
              wrapper_bsv.write('        ' + synth.synthBoundaryModule + '();\n')

        wrapper_bsv.write('    endmodule\n')
        if (use_build_tree == 1):
            wrapper_bsv.write('`endif\n'); 


        # Import platform wrapper.
        wrapper_bsv.write('    import ' + moduleList.localPlatformName +'_platform_synth::*;\n'); 

        wrapper_bsv.write('    module [Connected_Module] instantiatePlatform ('+ platform_module.interfaceType +');\n')
        wrapper_bsv.write('        let m <- ' + moduleList.localPlatformName + '_platform(noReset);\n')
        wrapper_bsv.write('        return m;\n')
        wrapper_bsv.write('    endmodule\n')

        wrapper_bsv.write('`include "' + module.name + '.bsv"\n')

        wrapper_bsv.write('\n// import non-synthesis private files\n')
        wrapper_bsv.write('// Get definition of TOP_LEVEL_WIRES\n')
        wrapper_bsv.write('import physical_platform::*;\n')
        wrapper_bsv.write('(* synthesize *)\n')

        wrapper_bsv.write('module [Module] mk_model_Wrapper\n')
        wrapper_bsv.write('    (TOP_LEVEL_WIRES);\n\n')
        wrapper_bsv.write('    // Instantiate main module\n')
        
        wrapper_bsv.write('    let m <- mkModel(clocked_by noClock, reset_by noReset);\n')

        wrapper_bsv.write('    return m;\n')
        wrapper_bsv.write('endmodule\n')

      else:
        log_bsv = open(logPath, 'w')
        log_bsv.write('import HList::*;\n')
        log_bsv.write('import ModuleContext::*;\n')

        # Parents of a synthesis boundary likely import the top level module of
        # the boundary.  This way, the synthesis boundary could be removed and
        # the code within the boundary would be imported correctly by the parent.
        # The code within the synthesis boundary will actually be imported at the
        # top level instead, so we need a dummy module for use by the parent of
        # a boundary that looks like it imports the code but actually does nothing.
        # Importing at the top level allows us to build all synthesis regions
        # in parallel.
        dummy_import_bsv = open(modPath + '.bsv', 'w')
        dummy_import_bsv.write('// Generated by wrapper_gen.py\n\n')
        dummy_import_bsv.write('module ' + module.synthBoundaryModule + ' ();\n');
        dummy_import_bsv.write('endmodule\n');
        dummy_import_bsv.close()

        if not os.path.exists(modPath + '_synth.bsv'):            
            dummy_module = LIModule(module.name, module.name)
            handle = getSynthHandle(moduleList, module)
            generateSynthWrapper(dummy_module, handle, moduleList.localPlatformName,
                                 moduleType = module.interfaceType,
                                 extraImports = module.extraImports)

        for wrapper in [wrapper_bsv, log_bsv]:      
            wrapper.write('// These are well-known/required leap modules\n')
            generateWellKnownIncludes(wrapper)
            wrapper.write('`include "awb/provides/librl_bsv_base.bsh"\n')
            wrapper.write('// import non-synthesis public files\n')
            wrapper.write('`include "' + module.name + '_compile.bsv"\n')
            wrapper.write('\n\n')
            
        log_bsv.write('// First pass to see how large the vectors should be\n')
        log_bsv.write('`define CON_RECV_' + module.boundaryName + ' 100\n')
        log_bsv.write('`define CON_SEND_' + module.boundaryName + ' 100\n')
        log_bsv.write('`define CON_RECV_MULTI_' + module.boundaryName + ' 50\n')
        log_bsv.write('`define CON_SEND_MULTI_' + module.boundaryName + ' 50\n')
        log_bsv.write('`define CHAINS_' + module.boundaryName + ' 50\n')
        wrapper_bsv.write('// Real build pass.  Include file built dynamically.\n')
        wrapper_bsv.write('`include "' + module.name + '_Wrapper_con_size.bsh"\n')

        for wrapper in [wrapper_bsv, log_bsv]:      
            wrapper.write('(* synthesize *)\n')
            wrapper.write('module [Module] ' + module.wrapperName() + '#(Reset baseReset) (SOFT_SERVICES_SYNTHESIS_BOUNDARY#(`CON_RECV_' + module.boundaryName + ', `CON_SEND_' + module.boundaryName + ', `CON_RECV_MULTI_' + module.boundaryName + ', `CON_SEND_MULTI_' + module.boundaryName +', `CHAINS_' + module.boundaryName +', ' + module.interfaceType + '));\n')
            wrapper.write('  \n')
            # we need to insert the fpga platform here
            # get my parameters 

            wrapper.write('    // instantiate own module\n')
            wrapper.write('    let int_ctx0 <- initializeServiceContext();\n')
            wrapper.write('    match {.int_ctx1, .int_name1} <- runWithContext(int_ctx0, putSynthesisBoundaryID(fpgaNumPlatforms() + ' + str(module.synthBoundaryUID + uidOffset)  + '));\n');
            wrapper.write('    match {.int_ctx2, .int_name2} <- runWithContext(int_ctx1, putSynthesisBoundaryPlatform("' + moduleList.localPlatformName + '"));\n')
            wrapper.write('    match {.int_ctx3, .int_name3} <- runWithContext(int_ctx2, putSynthesisBoundaryPlatformID(' + str(moduleList.localPlatformUID) + '));\n')
            wrapper.write('    match {.int_ctx4, .int_name4} <- runWithContext(int_ctx3, putSynthesisBoundaryName("' + str(module.boundaryName) + '"));\n')
            wrapper.write('    // By convention, global string ID 0 (the first string) is the module name\n');
            wrapper.write('    match {.int_ctx5, .int_name5} <- runWithContext(int_ctx4, getGlobalStringUID("' + moduleList.localPlatformName + ':' + module.name + '"));\n');
            wrapper.write('    match {.int_ctx6, .module_ifc} <- runWithContext(int_ctx5, ' + module.synthBoundaryModule + ');\n')
            
            # Need to expose clocks of the platform Module
            if(module.platformModule):
                wrapper.write('    match {.clk, .rst} = extractClocks(module_ifc);\n')
                wrapper.write('    match {.int_ctx7, .int_name7} <- runWithContext(int_ctx6, mkSoftConnectionDebugInfo(clocked_by clk, reset_by rst));\n')
                wrapper.write('    match {.final_ctx, .m_final}  <- runWithContext(int_ctx7, mkSoftConnectionLatencyInfo(clocked_by clk, reset_by rst));\n')                
            else:
                wrapper.write('    match {.int_ctx7, .int_name7} <- runWithContext(int_ctx6, mkSoftConnectionDebugInfo);\n')
                wrapper.write('    match {.final_ctx, .m_final}  <- runWithContext(int_ctx7, mkSoftConnectionLatencyInfo);\n')
            wrapper.write('    let service_ifc <- exposeServiceContext(final_ctx);\n')
            wrapper.write('    interface services = service_ifc;\n')
            wrapper.write('    interface device = module_ifc;\n')
            wrapper.write('endmodule\n')
    
        log_bsv.close()

      wrapper_bsv.close()
Example #9
    def setupTreeBuild(self, moduleList, topo):
        useBVI = self.parent.USE_BVI
        env = moduleList.env

        root_directory = model.rootDir

        ##
        ## Load intra-Bluespec dependence already computed.  This
        ## information will ultimately drive the building of Bluespec
        ## modules. Build tree has a few dependencies which must be
        ## captured.
        ##

        ## If we aren't building the build tree, don't bother with its dependencies
        env.ParseDepends(get_build_path(moduleList, moduleList.topModule) + '/.depends-build-tree',
                         must_exist = not moduleList.env.GetOption('clean'))
        tree_base_path = env.Dir(get_build_path(moduleList, moduleList.topModule))

        tree_file_synth = tree_base_path.File('build_tree_synth.bsv')
        tree_file_synth_bo_path = tree_base_path.File(self.parent.TMP_BSC_DIR + '/build_tree_synth.bo')

        tree_file_wrapper = tree_base_path.File('build_tree_Wrapper.bsv')
        tree_file_wrapper_bo_path = tree_base_path.File(self.parent.TMP_BSC_DIR + '/build_tree_Wrapper.bo')

        # Area constraints
        area_constraints = None
        try:
            if (moduleList.getAWBParam('area_group_tool', 'AREA_GROUPS_ENABLE')):
                area_constraints = area_group_tool.AreaConstraints(moduleList)
        except:
            # The area constraints code is not present.
            pass

        boundary_logs = []
        for module in topo:
            # Remove any platform modules. These are special in that
            # they can have wired interfaces.
            if (not module.platformModule):
                for log in module.moduleDependency['BSV_LOG']:
                    boundary_logs += [root_directory.File(log)]
        ##
        ## Back to SCons configuration (first) pass...
        ##

        top_module_path = get_build_path(moduleList, moduleList.topModule)

        # Inform object code build of the LI Graph retrieved from the
        # first pass.  Probe firstPassGraph for relevant object codes
        # (BA/NGC/BSV_SYNTH/BSV_SYNTH_BSH) accessed:
        # module.objectCache['NGC'] (these already have absolute
        # paths) I feel like the GEN_BAS/GEN_VERILOGS of the first
        # pass may be missing.  We insert these modules as objects in
        # the ModuleList.

        def makeAWBLink(doLink, source, buildPath, uniquifier=''):
            base_file = os.path.basename(str(source))
            link_dir = buildPath + '/.li'
            link_path =  link_dir + '/' + uniquifier + base_file
            if (doLink):
                if (os.path.lexists(link_path)):
                    os.remove(link_path)
                rel = os.path.relpath(str(source), link_dir)
                print 'Linking: ' + link_path + ' -> ' + rel
                os.symlink(rel, link_path)

            return link_path


        limLinkUserSources = []
        limLinkUserTargets = []
        limLinkPlatformSources = []
        limLinkPlatformTargets = []
        importStubs = []


        if (not self.getFirstPassLIGraph is None):
            # Now that we have demanded bluespec builds (for
            # dependencies), we should now downgrade synthesis boundaries for the backend.
            oldStubs = []
            for module in topo:
                if(not module.platformModule):
                    if((not module.name in self.getFirstPassLIGraph.modules) or (self.getFirstPassLIGraph.modules[module.name].getAttribute('RESYNTHESIZE') is None)): 
                        module.liIgnore = True
                    # this may not be needed.
                    else:
                        oldStubs += module.moduleDependency['GEN_VERILOG_STUB']

            # let's pick up the platform dependencies, since they are also special.
            env.ParseDepends(get_build_path(moduleList, moduleList.topModule) + '/.depends-platform',
                             must_exist = not moduleList.env.GetOption('clean'))

            # Due to the way that string files are
            # generated, they are difficult to rename in
            # the front-end compilation. This leads to
            # collisions among similarly-typed LI
            # Modules.  We fix it by uniquifying the links.

            def getModuleName(module):
                return module.name

            def getEmpty(module):
                return ''

            linkthroughMap = {'BA': getEmpty, 'GEN_BAS': getEmpty, 'GEN_VERILOGS': getEmpty, 'GEN_VERILOG_STUB': getEmpty, 'STR': getModuleName}

            buildPath = get_build_path(moduleList, moduleList.topModule)
            for module in self.getFirstPassLIGraph.modules.values():
            
                # do not link through those modules marked for resynthesis. 
                if(not module.getAttribute('RESYNTHESIZE') is None):  
                    continue

                moduleDeps = {}

                for objType in linkthroughMap:
                    if(objType in module.objectCache):
                        localNames =  map(lambda fileName: makeAWBLink(False,
                                                                       fileName.from_bld(),
                                                                       buildPath, 
                                                                       uniquifier=linkthroughMap[objType](module)),
                                          module.objectCache[objType])

                        # The previous passes GEN_VERILOGS are not
                        # really generated here, so we can't call them
                        # as such. Tuck them in to 'VERILOG'
                        if(objType == 'GEN_VERILOG_STUB'):
                            oldStubs += localNames                        
                        moduleDeps[objType] = localNames

                        if (module.getAttribute('PLATFORM_MODULE') is None):
                            limLinkUserTargets += localNames
                            limLinkUserSources += module.objectCache[objType]
                        else:
                            limLinkPlatformTargets += localNames
                            limLinkPlatformSources += module.objectCache[objType]

                m = Module(module.name, ["mk_" + module.name + "_Wrapper"],\
                           moduleList.topModule.buildPath, moduleList.topModule.name,\
                           [], moduleList.topModule.name, [], moduleDeps)

                moduleList.insertModule(m)
        else:
            # The top module/build pipeline only depend on non-platformModules
            oldStubs = [module.moduleDependency['GEN_VERILOG_STUB'] for module in moduleList.synthBoundaries() if not module.platformModule]




        ## Enumerate the dependencies created by the build tree.
        buildTreeDeps = {}

        ## We have now generated a completely new module. Let's throw it
        ## into the list.  Although we are building it separately, this
        ## module is an extension to the build tree.
        expected_wrapper_count = len(boundary_logs) - 2
        importBOs = []

        if (not self.getFirstPassLIGraph is None):
            # we now have platform modules in here.
            expected_wrapper_count = len(self.getFirstPassLIGraph.modules) - 2

            # If we have an LI graph, we need to construct and compile
            # LI import wrappers for the modules we received from the
            # first pass.  Do that here.  include all the dependencies
            # in the graph in the wrapper.
            liGraph = LIGraph([])
            firstPassGraph = self.getFirstPassLIGraph
            # We should ignore the 'PLATFORM_MODULE'
            liGraph.mergeModules([ module for module in bsv_tool.getUserModules(firstPassGraph) if module.getAttribute('RESYNTHESIZE') is None])
            for module in sorted(liGraph.graph.nodes(), key=lambda module: module.name):
                # pull in the dependencies generated by the dependency pass.
                env.ParseDepends(str(tree_base_path) + '/.depends-' + module.name,
                                 must_exist = not moduleList.env.GetOption('clean'))
                wrapper_path = tree_base_path.File(module.name + '_Wrapper.bsv')
                wrapper_bo_path = tree_base_path.File(self.parent.TMP_BSC_DIR + '/' + module.name + '_Wrapper.bo')

                # include commands to build the wrapper .bo/.ba
                # Here, we won't be using the generated .v (it's garbage), so we intentionally  get rid of it.
                importVDir = env.Dir('.lim_import_verilog')
                if not os.path.isdir(str(importVDir)):
                   os.mkdir(str(importVDir))

                wrapper_command = self.parent.compile_bo_bsc_base([wrapper_bo_path], get_build_path(moduleList, moduleList.topModule), vdir=importVDir) + ' $SOURCES'
                wrapper_bo = env.Command([wrapper_bo_path],
                                         [wrapper_path],
                                         wrapper_command)
                # create BO.
                importBOs += [wrapper_bo]

        verilog_deps = [ "__TREE_MODULE__" + str(id) for id in range(expected_wrapper_count)]

        if(self.parent.BUILD_LOGS_ONLY == 0):
            buildTreeDeps['GEN_VERILOGS'] = ["mk_" + vlog + '_Wrapper' + ".v"  for vlog in verilog_deps]
        else:
            buildTreeDeps['GEN_VERILOGS'] = []

        buildTreeDeps['GEN_BAS'] = [  "mk_" + vlog + '_Wrapper' + ".ba" for vlog in verilog_deps]
        buildTreeDeps['BA'] = []
        buildTreeDeps['STR'] = []
        buildTreeDeps['VERILOG'] = [top_module_path + '/' + self.parent.TMP_BSC_DIR + '/mk_build_tree_Wrapper.v']
        buildTreeDeps['GIVEN_BSVS'] = []
        buildTreeDeps['VERILOG_STUB'] = model.convertDependencies(oldStubs)

        tree_module = Module( 'build_tree', ["mkBuildTree"], moduleList.topModule.buildPath,\
                             moduleList.topModule.name,\
                             [], moduleList.topModule.name, [], buildTreeDeps, platformModule=True)

        tree_module.putAttribute('LI_GRAPH_IGNORE', True)

        moduleList.insertModule(tree_module)    
        wrapper_gen_tool.generateAWBCompileWrapper(moduleList, tree_module)

        ## This produces the treeNode BSV. It must wait for the
        ## compilation of the log files, which it will read to form the
        ## LIM graph
        ##
        ## We do two operations during this phase.  First, we dump a
        ## representation of the user program. This representation is
        ## used by the LIM compiler to create heterogeneous
        ## executables.  We then do a local modification to the build
        ## tree to reduce Bluespec compilation time.

        # If I got an LI graph, I don't care about the boundary logs.
        # In this case, everything comes from the first pass graph.

        # Usually, we only need logs and BOs to build the build tree.
        # However, during the second pass build we also need to fill
        # in information about area group paths (changed by tree build)
        tree_build_deps = boundary_logs + importBOs
        tree_build_results = [tree_file_wrapper, tree_file_synth]

        if (self.getFirstPassLIGraph and area_constraints):
            tree_build_deps += [area_constraints.areaConstraintsFilePlaced()]
            tree_build_results += [area_constraints.areaConstraintsFile()]

        ##
        ## The cutTreeBuild builder function needs some of the local state
        ## in the current function.  Build a dictionary with the required
        ## state and partial instance of cutTreeBuild with the state applied.
        ##
        cut_tree_state = dict()
        cut_tree_state['area_constraints'] = area_constraints
        cut_tree_state['boundary_logs'] = boundary_logs
        cut_tree_state['moduleList'] = moduleList
        cut_tree_state['tree_file_synth'] = tree_file_synth
        cut_tree_state['tree_file_wrapper'] = tree_file_wrapper

        cut_tree_build = functools.partial(self.cutTreeBuild, cut_tree_state)
        cut_tree_build.__name__ = 'cutTreeBuild'

        tree_components = env.Command(tree_build_results,
                                      tree_build_deps,
                                      cut_tree_build)

        ## Compiling the build tree wrapper produces several .ba
        ## files, some that are useful, the TREE_MODULES, and some
        ## which are not, the _Wrapper.ba.  As a result, we dump the
        ## tree build output to a different directory, so as not to
        ## pollute the existing build.  Here, we link to the relevant
        ## files in that directory.

        def linkLIMObjClosure(liModules, buildPath):
            def linkLIMObj(target, source, env):
                if (not self.getFirstPassLIGraph is None):
                    # The LIM build has passed us some source and we need
                    # to patch it through.
                    for module in liModules:
                        for objType in linkthroughMap:
                            if(objType in module.objectCache):                
                                map(lambda fileName: makeAWBLink(True, fileName.from_bld(), buildPath, uniquifier=linkthroughMap[objType](module)),
                                    module.objectCache[objType])

            return linkLIMObj


        ## The top level build depends on the compilation of the tree components
        ## into bo/ba/v files.

        # the GEN_BAS attached to the build tree need to be massaged
        # to reflect their actual path.  Perhaps we should be using
        # some kind of object that makes these sorts of conversions
        # simpler.

        producedBAs = map(lambda path: bsv_tool.modify_path_ba(moduleList, path), moduleList.getModuleDependenciesWithPaths(tree_module, 'GEN_BAS'))
        producedVs = map(lambda path: bsv_tool.modify_path_ba(moduleList, path), moduleList.getModuleDependenciesWithPaths(tree_module, 'GEN_VERILOGS')) + \
                     buildTreeDeps['VERILOG']

        tree_command = self.parent.compile_bo_bsc_base([tree_file_wrapper_bo_path], get_build_path(moduleList, moduleList.topModule)) + ' ' + tree_file_wrapper.path
        tree_file_wrapper_bo = env.Command([tree_file_wrapper_bo_path] + producedBAs + producedVs,
                                           tree_components,
                                           tree_command)

        # If we got a first pass LI graph, we need to link its object codes.
        if (not self.getFirstPassLIGraph is None):
            srcs = [s.from_bld() for s in limLinkUserSources]
            link_lim_user_objs = env.Command(limLinkUserTargets,
                                             srcs,
                                             linkLIMObjClosure([ module for module in bsv_tool.getUserModules(firstPassGraph) if module.getAttribute('RESYNTHESIZE') is None],
                                                               tree_base_path.path))
            env.Depends(link_lim_user_objs, tree_file_wrapper_bo)


        # the tree_file_wrapper build needs all the wrapper bo from the user program,
        # but not the top level build.
        top_bo = moduleList.topModule.moduleDependency['BSV_BO']
        all_bo = moduleList.getAllDependencies('BO')

        env.Depends(tree_file_wrapper_bo, all_bo)

        tree_synth_command = self.parent.compile_bo_bsc_base([tree_file_synth_bo_path], get_build_path(moduleList, moduleList.topModule)) + ' ' + tree_file_synth.path
        tree_file_synth_bo = env.Command([tree_file_synth_bo_path],
                                         [tree_file_synth, tree_file_wrapper_bo],
                                         tree_synth_command)

        env.Depends(top_bo, tree_file_synth_bo)
        env.Depends(moduleList.topModule.moduleDependency['BSV_LOG'],
                    tree_file_synth_bo)


        #Handle the platform_synth build, which is special cased.
        platform_synth = get_build_path(moduleList, moduleList.topModule) + "/" +  moduleList.localPlatformName + "_platform_synth.bsv"
        platform_synth_bo_path = get_build_path(moduleList, moduleList.topModule) + "/" + self.parent.TMP_BSC_DIR +"/" + moduleList.localPlatformName + "_platform_synth"
        # if we are in the lim linking phase, we need to change the
        # vdir directory to hide the spurious verilog generated by
        # bluespec.
        importVDir = None
        if(not self.getFirstPassLIGraph is None):
            importVDir = env.Dir('.lim_import_verilog')
            if not os.path.isdir(str(importVDir)):
                os.mkdir(str(importVDir))

        platform_synth_command = self.parent.compile_bo_bsc_base([platform_synth_bo_path + '.bo'], get_build_path(moduleList, moduleList.topModule), vdir=importVDir) + ' $SOURCE'
        platform_wrapper_bo = get_build_path(moduleList, moduleList.topModule) + "/" + self.parent.TMP_BSC_DIR + "/" +moduleList.localPlatformName + '_platform_Wrapper.bo'

        platform_synth_deps = [platform_synth]
        #if we have a module graph, we don't require the compilation of the platform_wrapper_bo.
        if (self.getFirstPassLIGraph is None):
            platform_synth_deps.append(platform_wrapper_bo)
        platform_synth_bo = env.Command([platform_synth_bo_path + '.bo'],
                                         platform_synth_deps,
                                         platform_synth_command)
        # this produces a ba also?
        env.Depends(moduleList.topModule.moduleDependency['BSV_LOG'],
                        platform_synth_bo)

        # Platform synth does the same object-bypass dance as tree_module.
        if(not self.getFirstPassLIGraph is None):
            srcs = [s.from_bld() for s in limLinkPlatformSources]
            link_lim_platform_objs = env.Command(limLinkPlatformTargets,
                                                 srcs,
                                                 linkLIMObjClosure([ module for module in bsv_tool.getPlatformModules(firstPassGraph) if module.getAttribute('RESYNTHESIZE') is None],
                                                                   tree_base_path.path))
            env.Depends(link_lim_platform_objs, platform_synth_bo)

        # need to generate a stub file for the build tree module.
        # note that in some cases, there will be only one module in
        # the graph, usually in a multifpga build.  In this case,
        # the build_tree module will be vestigial, but since we can't
        # predict this statically we'll have to build it anyway.

        tree_module.moduleDependency['GEN_VERILOG_STUB'] = [self.parent.stubGenCommand(top_module_path,
                                                                                       "build_tree",
                                                                                       top_module_path + '/' + self.parent.TMP_BSC_DIR + "/mk_build_tree_Wrapper.v")]

        # top level only depends on platform modules
        moduleList.topModule.moduleDependency['VERILOG_STUB'] = model.convertDependencies([module.moduleDependency['GEN_VERILOG_STUB'] for module in moduleList.synthBoundaries() if module.platformModule])
        if(not self.getFirstPassLIGraph is None):
            #Second pass build picks up stub files from the first pass build.
            moduleList.topModule.moduleDependency['VERILOG_STUB'] += model.convertDependencies(oldStubs)
Example #10
class ModuleTests(unittest.TestCase):
    def setUp(self):
        self.m = Module()

    def tearDown(self):
        pass

    def depth(self):
        return len(self.m.cells)

    def width(self):
        return len(self.m.cells[0])

    def createGridAndTieCell0Input(self,
                                   wIn,
                                   wOut,
                                   width,
                                   depth=1,
                                   initValForCell0=False):
        self.m.createGrid(wIn, wOut, width, depth)
        self.m.tieCell0([initValForCell0])

    def testInit4x1(self):
        self.createGridAndTieCell0Input(4, 4, 4, 1)

        self.assertTrue(self.depth() == 1)
        self.assertTrue(self.width() == 4)

    def testInitNxN(self):
        self.createGridAndTieCell0Input(7, 7, 7, 6)

        self.assertTrue(self.depth() == 6)
        self.assertTrue(self.width() == 7)

    def test2x1AndTiedLow(self):
        self.createGridAndTieCell0Input(2, 2, 2, 1)

        self.m.driveInputs([True, True])

        self.assertEqual(self.m.sampleOutputs(), [False, False])

    def test2x1AndTiedHigh(self):
        self.createGridAndTieCell0Input(2, 2, 2, 1, True)

        self.m.driveInputs([True, True])

        self.assertEqual(self.m.sampleOutputs(), [True, True])

    def test3x1AndTiedHigh(self):
        self.createGridAndTieCell0Input(3, 3, 3, 1, True)

        self.m.driveInputs([True, True, False])

        self.assertEqual(self.m.sampleOutputs(), [True, True, False])

    def test2x2AndTiedHigh(self):
        self.createGridAndTieCell0Input(2, 2, 2, 2, True)

        self.m.driveInputs([True, True])
        self.assertEqual(self.m.sampleOutputs(), [True, True])

        self.m.driveInputs([True, False])
        self.assertEqual(self.m.sampleOutputs(), [False, False])

    def test3x2AndTiedHigh(self):
        self.createGridAndTieCell0Input(3, 3, 3, 2, True)

        self.m.driveInputs([True, True, True])
        self.assertEqual(self.m.sampleOutputs(), [True, True, True])

        self.m.driveInputs([True, False, True])
        self.assertEqual(self.m.sampleOutputs(), [False, False, False])

    def testFixNumberOfFlopsTo0(self):
        self.createGridAndTieCell0Input(25, 25, 25, 14, True)
        self.m.setNumFlops(0)

        self.assertTrue(self.m.getNumFlops() == 0)

    def testFixNumberOfFlopsToLtWidth(self):
        self.createGridAndTieCell0Input(25, 25, 25, 14, True)
        self.m.setNumFlops(17)

        self.assertTrue(self.m.getNumFlops() == 17)

    def testFixNumberOfFlopsToGtWidth(self):
        self.createGridAndTieCell0Input(25, 25, 25, 14, True)
        self.m.setNumFlops(28)

        self.assertTrue(self.m.getNumFlops() == 28)

    def testFixNumberOfFlopsToMax(self):
        self.createGridAndTieCell0Input(25, 25, 25, 14, True)
        self.m.setNumFlops(25 * 14)

        self.assertTrue(self.m.getNumFlops() == (25 * 14))

    def test2x1FloppedAndTiedHigh(self):
        self.createGridAndTieCell0Input(2, 2, 2, 1, True)
        self.m.setNumFlops(2)

        self.m.driveInputs([True, True])

        self.m.clk()
        self.assertEqual(self.m.sampleOutputs(), [True, False])

        self.m.clk()
        self.assertEqual(self.m.sampleOutputs(), [True, True])

    def testOutputMuxOnlyExistsWhenOutputSmallerThanInputWidth(self):
        self.createGridAndTieCell0Input(2, 2, 2)
        self.assertEqual(self.m.outputMux, None)

    def testOutputMuxForMoreInputsThanOutputs(self):
        self.createGridAndTieCell0Input(2, 1, 2)
        self.assertNotEqual(self.m.outputMux, None)

    def testOutputSizeFor2Inputs1Output(self):
        self.createGridAndTieCell0Input(2, 1, 2)
        self.m.driveInputs([True, True])
        self.assertEqual(len(self.m.sampleOutputs()), 1)

    def testOutputFor2Inputs1Output(self):
        self.createGridAndTieCell0Input(2, 1, 2, 1, True)

        self.m.driveInputs([True, True])

        self.assertEqual(self.m.sampleOutputs(), [True])

    def testOutputFor3Inputs2Output(self):
        self.createGridAndTieCell0Input(3, 2, 3, 1, True)

        self.m.driveInputs([True, True, False])

        self.assertEqual(self.m.sampleOutputs(), [True, False])

    def testOutputFor4Inputs3Output(self):
        self.createGridAndTieCell0Input(4, 3, 4, 1, True)

        self.m.driveInputs([True, True, True, False])

        self.assertEqual(self.m.sampleOutputs(), [True, True, False])

    def testOutputFor5Inputs4Output(self):
        self.createGridAndTieCell0Input(5, 4, 5, 1, True)

        self.m.driveInputs([True, True, True, False, False])

        self.assertEqual(self.m.sampleOutputs(), [True, True, False, False])

    def testOutputFor8Inputs5Output(self):
        self.createGridAndTieCell0Input(8, 5, 8, 1, True)

        self.m.driveInputs([True] * 6 + [False, False])

        self.assertEqual(self.m.sampleOutputs(),
                         [True, True, True, False, False])

    def testModuleHasFixedCells(self):
        self.createGridAndTieCell0Input(2, 2, 2)
        self.m.setNumFlops(2)
        self.m.driveInputs([True] * 2)
        self.m.clk()
        self.m.sampleOutputs()
        self.m.clk()
        self.m.sampleOutputs()
        self.assertTrue(self.m.moduleHasFixedCells())

    def testModuleHasNoFixedCells(self):
        self.createGridAndTieCell0Input(2, 2, 2, 1, True)
        self.m.cells[0][1].setOutputType(OutputType.sync)
        self.m.driveInputs([True] * 2)
        self.m.clk()
        self.m.sampleOutputs()
        self.m.driveInputs([False] * 2)
        self.m.clk()
        self.m.sampleOutputs()
        self.assertFalse(self.m.moduleHasFixedCells())

    def testOutputHistory(self):
        self.createGridAndTieCell0Input(2, 2, 2, 1, True)

        self.m.driveInputs([True, True])
        self.m.sampleOutputs()
        self.m.sampleOutputs()
        self.m.sampleOutputs()
        self.assertEqual(len(self.m.outputHistory()), 3)
        self.assertEqual(self.m.outputHistory(),
                         [[True, True], [True, True], [True, True]])
        self.assertTrue(self.m.outputsFixed())

    def testOutputsNotFixed(self):
        self.createGridAndTieCell0Input(2, 2, 2, 1, True)

        self.m.driveInputs([True, True])
        self.m.sampleOutputs()
        self.m.driveInputs([False, False])
        self.m.sampleOutputs()
        self.assertFalse(self.m.outputsFixed())

    def testOutputFor1Input2Outputs(self):
        self.createGridAndTieCell0Input(1, 2, 2, 1, True)

        self.m.driveInputs([True])

        self.assertEqual(self.m.sampleOutputs(), [True, True])

    def testOutputFor2Input4Outputs(self):
        self.createGridAndTieCell0Input(2, 4, 4, 1, True)

        self.m.driveInputs([True, True])

        self.assertEqual(self.m.sampleOutputs(), [True, True] * 2)

    def testOutputForLargerGridWidth(self):
        self.createGridAndTieCell0Input(2, 4, 6, 1, True)

        self.m.driveInputs([True, True])

        self.assertEqual(self.m.sampleOutputs(), [True, True] * 2)
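The test case above can be run with the standard unittest entry point (assuming unittest is imported in the test module, as the TestCase subclass implies):

if __name__ == '__main__':
    unittest.main()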
Example #11
 def setUp(self):
     self.cycles = 100
     self.m = Module()
Example #12
class GameServer(object):
    def __init__(self, ip, port):
        self.ip = ip
        self.port = port
        self.buffsize = 2048**2
        self.main_socket = s.socket(s.AF_INET, s.SOCK_STREAM)  # Create TCP socket
        ###
        self.main_socket.setsockopt(s.SOL_SOCKET, s.SO_REUSEADDR, 1)  # enable SO_REUSEADDR (option value is a flag, not the port)
        ###
        self.main_socket.bind((self.ip, self.port))
        self.main_socket.listen(5)
        self.server_is_open = True  # is the game server on?

        self.games = []  # list of on-going games
        self.names = {}  # dictionary of client and his game name
        self.timers = {}  # dictionary of each game and its time state (True/False)  {Game:T/F}
        self.question_start_time = {}  # dictionary of {Game:current_question_start_time}
        self.wall_displays = {}  # dictionary of all wall display sockets and their games {socket:Game}
        self.next_question_request = []  # list of wall displays that requested a next question (for a specific round)
        self.validated = {}  # dictionary of clients and their validated games
        self.validated_login = {}  # dictionary of validated username and password clients, values: T/number of failed logins

        self.messages = {}  # {socket:[messages],...}

        self.inputs = [self.main_socket]
        self.outputs = []
        self.connected = {}  # dictionary of connected clients (including clients not in games) {clientobj:(Game, player_name),...}

        self.m = Module()  # module object for database interaction

    @staticmethod
    def get_current_time():
        return time.clock()

    @staticmethod
    def run_command(cmd):
        """
        Spawn a new process, connect to its input/output/error pipes, and return its output
        arg: cmd
        arg type: string
        ret type: tuple of (stdout, stderr)
        """
        return subprocess.Popen(cmd,
                                shell=True,  # not recommended, but does not open a window
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE,
                                stdin=subprocess.PIPE).communicate()

    @staticmethod
    def get_host_ip():
        """

        :return: the computer's ip address
        """
        ip = GameServer.run_command('ipconfig /all')[0].split("\n")
        for i in ip:
            if 'IPv4 Address' in i and '(Preferred)' in i:
                ip = i.split()[-1]
                ip = ip[0:ip.find('(')]
                break
            else:
                ip = 'localhost'  # no specific address was found, use localhost
        return ip
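    # The ipconfig line this parser matches looks roughly like (hypothetical address):
    #     IPv4 Address. . . . . . . . . . . : 192.168.1.10(Preferred)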

    @staticmethod
    def get_open_ports():
        '''

        :return: first open port number
        '''
        sock = s.socket(s.AF_INET, s.SOCK_STREAM)
        sock.bind(("", 0))
        sock.listen(1)
        port = sock.getsockname()[1]
        sock.close()
        return port

    @staticmethod
    def update_host_and_port(host, port):
        try:
            print 'Updating Server information on Kahoot_db Server...\r\nKahoot_play Server at:\r\nHost: %s; Port: %d' \
                  % (host, port)
            result = kht.update(name='YuvalStein', ip=host, port=port)
            if result['status'] == 'OK':
                print 'Information updated successfully.'
            else:
                print(result['status'])
                raise Exception(result['status'])
        except Exception as e:
            print e
            print 'Connection to kahoot_db Server failed, Please manually update the client.'

    def handle_game(self):
        while self.inputs and self.server_is_open:
            readables, writeables, exceptions = select(self.inputs, self.outputs, [])

            for sockobj in readables:

                if sockobj is self.main_socket:
                    clientobj, addr = self.main_socket.accept()
                    print 'connected from', addr
                    self.inputs.append(clientobj)
                    self.outputs.append(clientobj)
                    self.messages[clientobj] = []  # this is where the clients messages will be saved
                    self.connected[clientobj] = None  # client not in game currently

                else:
                    # Client socket
                    data = None
                    try:
                        data = sockobj.recv(self.buffsize)  # receive the data the client sent
                        print 'Got', data, 'on', id(sockobj)
                    except Exception as e:
                        print e
                    if not data:  # close connection
                        self.inputs.remove(sockobj)
                        self.outputs.remove(sockobj)
                        del self.connected[sockobj]
                        # client exits in the middle of the game
                        if sockobj in self.validated:
                            game = self.validated[sockobj]
                            player_name = self.names[sockobj]
                            del game.players[player_name]
                            if len(game.players) < 2:
                                for socketobj, player in self.names.items():
                                    if player != player_name:
                                        self.add_data(socketobj, ['error', 'Not enough players in game.'])
                                for socketobj, _game in self.wall_displays.items():
                                    if _game == game and sockobj != socketobj:
                                        self.add_data(socketobj, ['error', 'Not enough players in game.'])
                                del self.validated[sockobj]
                                del self.names[sockobj]
                                if game in self.games:
                                    del self.games[self.games.index(game)]

                        if sockobj in self.validated_login and self.validated_login[sockobj] is True:
                            del self.validated_login[sockobj]

                        # wall clients
                        if sockobj in self.wall_displays:
                            game = self.wall_displays[sockobj]
                            del self.wall_displays[sockobj]
                            # disconnect players
                            for socketobj in self.names:  # inform all connected players that game is over
                                if socketobj in self.validated and self.validated[socketobj] == game:
                                    self.add_data(socketobj, ['error', 'Wall Display died.'])
                            if game in self.games:
                                del self.games[self.games.index(game)]

                            if sockobj in self.next_question_request:
                                del self.next_question_request[self.next_question_request.index(sockobj)]

                        # update everyone with the information
                        self.send_all_data()

                    else:  # handle data
                        # Several messages may arrive in a single recv(); each one is a
                        # Python list literal, so split them by counting brackets.
                        num = 0          # current bracket nesting depth
                        prev_index = 0   # start index of the current message
                        tmp_data = ""
                        orig_data = data
                        for i in range(len(orig_data)):
                            if orig_data[i] == '[':
                                num += 1
                            elif orig_data[i] == ']':
                                num -= 1
                            if num == 0:
                                if prev_index == 0:
                                    tmp_data = orig_data[:i + 1]
                                else:
                                    tmp_data = orig_data[prev_index:i + 1]
                                data = eval(tmp_data)  # NOTE: eval trusts client input (see the safer parser sketch after the class)
                                self.handle_data(data, sockobj)
                                prev_index = i + 1

                        self.send_all_data()

        # the loop ended: the server was closed or no inputs remain
        self.close_clients()
        self.close()

    def handle_data(self, data, sockobj):
        if data[0] == 'close_server':
            self.server_is_open = False
        elif data[0] == 'wall_display':
            if data[1] == 'start_update':  # get important data in the beginning of the game
                self.get_connected_players(sockobj)
            elif data[1] == 'game_update':
                if data[2] == 'first_question':
                    game = self.wall_displays[sockobj]
                    game.started = True  # update boolean 'started'
                    self.timers[game] = True
                    self.question_start_time[game] = self.get_current_time()
                    # information for wall display
                    info = ['wall_display', 'game_update', 'first_question'] + self.get_current_question_info(sockobj)
                    self.add_data(sockobj, info)

                    # information for all the connected clients to this game (possible answers list)
                    current_answers = game.get_current_answers()
                    for clientobj in self.connected:
                        # if the client is in the same game as the wall display
                        if self.connected[clientobj] and self.connected[clientobj][0] == self.wall_displays[sockobj]:
                            # send the client the current answers
                            self.add_data(clientobj, ['game', 'answers'] + current_answers)
                elif data[2] == 'game_update':
                    self.get_number_of_answers(sockobj)  # returns the current question and the number of answers
                elif data[2] == 'round_results':
                    self.timers[self.wall_displays[sockobj]] = False
                    self.get_round_info(sockobj)
                    self.next_question_request = []
            elif data[1] == 'next_question':
                if sockobj not in self.next_question_request:
                    game = self.wall_displays[sockobj]
                    self.timers[game] = True
                    self.question_start_time[game] = self.get_current_time()
                    game.player_answers = {}  # new round of answers
                    game.current_question += 1  # update question number
                    info = ['wall_display', 'next_question'] + self.get_current_question_info(sockobj)
                    self.next_question_request.append(sockobj)
                    self.add_data(sockobj, info)

                    # information for all the connected clients to this game (possible answers list)
                    current_answers = game.get_current_answers()
                    for clientobj in self.connected:
                        # if the client is in the same game as the wall display
                        if self.connected[clientobj] and self.connected[clientobj][0] == self.wall_displays[sockobj]:
                            # send the client the current answers
                            info = ['game', 'answers'] + current_answers
                            self.add_data(clientobj, info)
            elif data[1] == 'end_questionnaire':  # game ended
                self.timers[self.wall_displays[sockobj]] = False
                self.end_game(sockobj)

            else:  # unknown data
                pass

        elif data[0] == 'game':
            if data[1] == 'answer':
                player_name = data[3]
                # check if player already gave an answer or question time is over
                if self.timers[self.validated[sockobj]]:  # don't take answer if question time is up
                    if player_name not in self.validated[sockobj].player_answers or \
                            not self.validated[sockobj].player_answers[player_name]:
                        self.update_player_answer(sockobj, data[2], player_name)

        elif data[0] == 'join':
            if data[1] == 'validate':
                self.validate_game(sockobj, data[2], data[3])
            else:  # player wants to join a game
                if sockobj in self.validated and self.validated[sockobj]:  # final validation
                    self.join_game(sockobj, self.validated[sockobj], data[1])
                    # if first player connected set automatic timer (if requested for game)
                    if len(self.validated[sockobj].players) == 1 and \
                            not self.validated[sockobj].wait_for_players:
                        self.timers[self.validated[sockobj]] = ('Start', self.get_current_time())

        elif data[0] == 'login':
            login_request = data[1]
            if login_request == 'login':  # user wants to login
                self.validate_login(sockobj, data[2], data[3])
            elif login_request == 'get_all_kahoots':
                self.add_data(sockobj, ['login', 'get_all_kahoots'] + self.m.get_all_questionnaries())
            elif login_request == 'get_my_kahoots':
                self.add_data(sockobj, ['login', 'get_my_kahoots'] + self.m.get_user_questionnaires(data[2]))
            elif login_request == 'host_game':
                self.open_new_game(sockobj, data[2])
            else:  # create new game (not to play right now)
                self.create_new_game(sockobj, data[2], data[3], data[4], data[5])

        elif data[0] == 'sign_up':
            self.add_new_user(sockobj, data[1], data[2])

        else:  # invalid request
            self.add_data(sockobj, 'Invalid request')

    def initiate(self):
        # NOTE: the main socket is already bound and listening from __init__;
        # calling this again on the same socket raises an error.
        self.main_socket.bind((self.ip, self.port))
        self.main_socket.listen(4)

    def add_data(self, socketobj, data):
        if type(data) != list:
            data = [data]
        self.messages[socketobj] += data

    def send_all_data(self):
        for socketobj in self.messages:
            if self.messages[socketobj]:
                data = str(self.messages[socketobj])
                try:
                    socketobj.send(data)
                except:
                    pass
                # delete the sent data
                self.messages[socketobj] = []

    def validate_game(self, socketobj, game_name, game_pin):
        game = None
        info = None
        for g in self.games:
            # if game name and pin are correct, and the game hasn't started yet then joining is valid
            if str(g.name) == str(game_name) and str(g.pin) == str(game_pin):
                if not g.started:
                    self.validated[socketobj] = g  # validate game for client
                    game = g
                else:
                    info = 'game in progress, no more new players accepted'
        if game and not info:
            self.add_data(socketobj, ['join', 'request', True])
        else:
            self.add_data(socketobj, ['join', 'request', False])
            if not info:
                info = "couldn't find requested game, please check the name and pin."
            data = [{'join': info}]
            self.add_data(socketobj, data)

    def validate_login(self, socketobj, username, password):
        correct_password = self.m.get_user_password(username)
        if correct_password and correct_password == password:  # correct password was entered
            self.validated_login[socketobj] = True
            self.add_data(socketobj, ['login', 'login', True])  # return client successful login
        elif not correct_password:  # no such user in database
            self.add_data(socketobj, ['login', 'login', False, 'User not found'])
        else:
            self.validated_login[socketobj] = self.validated_login[socketobj] + 1 if socketobj in self.validated_login \
                else 1  # number of unsuccessful login attempts
            self.add_data(socketobj, ['login', 'login', False])  # return client unsuccessful

    def create_new_game(self, socketobj, publisher, game_name, questions_and_answers, correct_answers_and_times):
        result = self.m.add_new_questionnaire(publisher, game_name, questions_and_answers, correct_answers_and_times)
        if result:
            self.add_data(socketobj, 'successfully added new questionnaire')
        else:
            self.add_data(socketobj, "couldn't add the questionnaire because name is not unique, "
                                     "please choose a different name.")

    def open_new_game(self, socketobj, game_name):
        print 'Opening new game...'
        game_pin = random.randrange(10**5, 10**6)  # game_pin is a random number between 100000 and 999999
        print 'Game Name:', game_name, '; Game Pin:', game_pin
        game = Game(game_name, game_pin)
        self.games.append(game)
        self.wall_displays[socketobj] = game
        game.match_question_and_answer(self.m.get_questions_and_answers(game_name))  # update game questions
        # timing settings
        time_settings = eval(self.m.get_time_settings(game_name))  # dictionary of time settings
        game.wait_for_players = time_settings['wait_for_players']
        game.interval_between_questions = time_settings['interval']
        # get starting info and update the wall display
        self.add_data(socketobj, ['login', 'host_game', game_pin, game.wait_for_players, game.interval_between_questions])

    def join_game(self, socketobj, game, player_name):
        if player_name not in game.players:
            game.players[player_name] = 0  # every player starts with 0 points
            self.names[socketobj] = player_name
            self.connected[socketobj] = (game, player_name)  # save the player and the game he is connected to
            self.add_data(socketobj, ['join', True, player_name,
                                      'Successfully added to game!\r\nWaiting for more players...'])
        else:
            self.add_data(socketobj, ['join', False, player_name, 'Name already taken, please choose a different name'])

    def get_connected_players(self, socketobj):
        game = self.wall_displays[socketobj]
        players_in_game = str(game.players)
        self.add_data(socketobj, ['wall_display', 'start_update', players_in_game])

    def get_number_of_answers(self, socketobj):
        game = self.wall_displays[socketobj]
        all_answers = len(game.player_answers)
        self.add_data(socketobj, ['wall_display', 'game_update', 'game_update', str(all_answers)])

    def get_round_info(self, socketobj):
        game = self.wall_displays[socketobj]
        # bump the current question and immediately restore it afterwards,
        # in order to check whether there is a next question
        game.current_question += 1
        if game.get_current_question() == [None]:  # last question
            info = ['wall_display', 'game_update', 'round_results', game.get_game_scores(), True]
        else:  # there is another question after this one
            info = ['wall_display', 'game_update', 'round_results', game.get_game_scores(), False]
        game.current_question -= 1
        self.add_data(socketobj, info)

        # tell each connected client whether it was right or wrong, and send it its score.
        for clientobj in self.connected:
            # if the client is in the same game as the wall display
            if self.connected[clientobj] and self.connected[clientobj][0] == game:
                # send the client the current answers
                player_name = self.names[clientobj]
                if player_name in game.player_answers:
                    client_answer = game.player_answers[player_name] in game.correct_answers[game.current_question]
                else:
                    client_answer = False
                points = game.players[player_name]
                self.add_data(clientobj, ['game', 'round_results', client_answer, points])

    def update_player_answer(self, socketobj, answer, player_name):
        game = self.validated[socketobj]
        correct_answer = game.correct_answers[game.current_question]
        # one correct answer (a string) or several correct answers (a collection);
        # for a string, only an exact match counts (a plain 'in' would do a substring check)
        if answer == correct_answer or (type(correct_answer) != str and answer in correct_answer):
            points = self.calculate_points(True, self.question_start_time[game], self.get_current_time(),
                                           game.times[game.current_question])
            if player_name in game.correct_player_answers:
                game.correct_player_answers[player_name] += 1
            else:  # first correct answer for player
                game.correct_player_answers[player_name] = 1
        else:
            points = self.calculate_points(False)
        game.update_player_and_points(player_name, points)
        game.player_answers[player_name] = answer  # player gave an answer, don't allow more

    def calculate_points(self, answer, start_time=None, time=None, question_time=None):
        if not answer:  # if answer was incorrect don't give the player any points
            return 0
        # points are proportional to the time left, e.g. answering after 6s of a
        # 20s question leaves 14s and yields int(100 * 14 / 20.0) = 70 points
        time_left = question_time - (time - start_time)
        return int(100*(time_left/question_time))

    def get_current_question_info(self, socketobj):
        game = self.wall_displays[socketobj]
        all_answers = len(game.player_answers)
        info = [str(all_answers)] + game.get_current_question()
        return info

    def add_new_user(self, socketobj, username, password):
        result = self.m.add_user(username, password)
        if result == 'user added successfully':
            self.add_data(socketobj, ['sign_up', True])
        else:  # username taken
            result += " Please enter a different username."
            self.add_data(socketobj, ['sign_up', False, result])

    def message_all_game_participants(self, game, message):
        # message clients
        for sockobj in self.validated:
            if self.validated[sockobj] == game:
                self.add_data(sockobj, message)
        # message all wall display clients
        for sockobj in self.wall_displays:
            if self.wall_displays[sockobj] == game:
                self.add_data(sockobj, message)

    def client_score(self, client):
        return self.validated[client].players[self.names[client]]

    def end_game(self, socketobj):
        game = self.wall_displays[socketobj]
        game.ended = True
        info = ['wall_display', 'end_questionnaire', game.get_winners()]
        self.add_data(socketobj, info)
        #self.save_game(game)  # if the game ended and at least two players were in it, save it

        # send players their places
        clients = []
        for clientobj in self.connected:
            # if the client is in the same game as the wall display
            if self.connected[clientobj] and self.connected[clientobj][0] == game:
                clients.append(clientobj)

        clients = sorted(clients, key=self.client_score, reverse=True)
        for i in range(len(clients)):
            player_name = self.names[clients[i]]
            points = game.players[player_name]
            self.add_data(clients[i], ['game', 'final_results', i+1, points])

    def save_game(self, game):
        if len(game.players) >= 2 and game.ended:
            info = self.m.get_all_info(game.name)[0]  # fetch the questionnaire info once
            self.m.add_questionnaire(info[0], info[1], info[2], game.players, info[-3])

    def close_clients(self):
        print 'Sending Shut-Down'
        for client in self.connected:
            try:
                client.send(str(['server_down']))
            except:
                pass  # the client may already have disconnected

    def close(self):
        self.main_socket.close()
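
The server above concatenates Python list literals into a single TCP payload and splits them by bracket counting before calling eval. A minimal, hedged sketch of how a peer could split and parse the same framing without eval, using ast.literal_eval; the function name and the sample payload are illustrative, not part of the original server:

import ast

def split_messages(payload):
    """Split a payload of concatenated list literals into Python lists."""
    messages, depth, start = [], 0, 0
    for i, ch in enumerate(payload):
        if ch == '[':
            depth += 1
        elif ch == ']':
            depth -= 1
            if depth == 0:
                # literal_eval only accepts literals, so hostile input cannot run code
                messages.append(ast.literal_eval(payload[start:i + 1]))
                start = i + 1
    return messages

# split_messages("['join', 'request', True]['game', 'answers', 'A', 'B']")
# -> [['join', 'request', True], ['game', 'answers', 'A', 'B']]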
Example #13
0
        return (self.x[idx], self.t[idx], idx)


batch_sz = {'train': 12, 'valid': 10}
dataloader = {
    phase:
    torch.utils.data.DataLoader(dataset=SrDataset(phase, width, height,
                                                  img_dir),
                                batch_size=batch_sz[phase],
                                shuffle=True)
    for phase in ['train', 'valid']
}

use_gpu = torch.cuda.is_available()

module = Module()

if pretrained is None:
    for param in module.parameters():
        print(param.size())
        param.data.normal_(0.001, 0.05)
else:
    module.load_state_dict(torch.load(pretrained))

if use_gpu:
    module.cuda()
    #module = nn.DataParallel(module, gpu)

# print(module)
loss = nn.MSELoss()
optimizer = torch.optim.Adam(module.parameters(), lr=1)
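
The snippet stops after constructing the loss and optimizer. A minimal, hedged sketch of the epoch loop that would typically follow, assuming a PyTorch >= 0.4 API and the (x, t, idx) tuples returned by the dataset; num_epochs and the logging format are assumptions, not part of the original:

num_epochs = 10  # assumed; not specified in the snippet

for epoch in range(num_epochs):
    for phase in ['train', 'valid']:
        module.train(phase == 'train')  # toggle train/eval behaviour
        running_loss = 0.0
        for x, t, _idx in dataloader[phase]:
            if use_gpu:
                x, t = x.cuda(), t.cuda()
            optimizer.zero_grad()
            with torch.set_grad_enabled(phase == 'train'):
                out = module(x)
                batch_loss = loss(out, t)
                if phase == 'train':
                    batch_loss.backward()
                    optimizer.step()
            running_loss += batch_loss.item() * x.size(0)
        print(phase, 'epoch', epoch,
              'loss', running_loss / len(dataloader[phase].dataset))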
Example #14
0
 def setUp(self):
     self.m = Module()
 User(name='Bob Tan',
      username='******',
      password=
      '******'
      ),  # temp_pass
 User(name='Mr. GovTech',
      username='******',
      password=
      '******'
      ),  # govtech_strong_password
 User(name='Admin',
      username='******',
      password=
      '******'
      ),  # super_duper_whitehacks_strong_password
 Module(code='IS200', name='Software Foundations'),
 Module(code='IS103', name='Computational Thinking'),
 Module(code='IS101', name='Seminar on Information Systems'),
 Module(code='WRIT001', name='Academic Writing'),
 Lesson(module_code='IS200', name='Lesson 01'),
 Lesson(module_code='IS103', name='Lesson 01'),
 Lesson(module_code='IS101', name='Lesson 01'),
 Lesson(module_code='WRIT001', name='Lesson 01'),
 Document(
     lesson_id=1,
     name='Document 01',
     is_draft=False,
     content=
     'Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum'
 ),
 Document(lesson_id=4,
Example #16
0
class ModuleTests(unittest.TestCase):
    def setUp(self):
        self.cycles = 100
        self.m = Module()

    def tearDown(self):
        pass

    def depth(self):
        return len(self.m.cells)

    def width(self):
        return len(self.m.cells[0])

    def createGridAndTieCell0Input(self,
                                   inWidth,
                                   outWidth,
                                   gridWidth,
                                   d,
                                   v=False):
        self.m.createGrid(inWidth, outWidth, gridWidth, d)
        self.m.tieCell0([v])

    def gatesPruned(self, inWidth, outWidth, gridWidth, depth, flops):
        self.createGridAndTieCell0Input(inWidth, outWidth, gridWidth, depth,
                                        True)
        self.m.setNumFlops(flops)
        self.m.randomizeGates()

        for i in range(self.cycles):
            a = []
            for j in range(inWidth):
                a.append(random.getrandbits(1))

            self.m.driveInputs(a)
            self.m.clk()
            self.m.sampleOutputs()

        return self.m.outputsFixed() or self.m.moduleHasFixedCells()
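
    # unittest only collects methods named test_*, so the helpers above never run
    # on their own. A minimal, hedged sketch of a driver test; the grid dimensions
    # below are illustrative assumptions, not values from the original suite, and
    # outputsFixed()/moduleHasFixedCells() are assumed to return booleans.
    def test_gates_pruned_smoke(self):
        pruned = self.gatesPruned(inWidth=4, outWidth=2, gridWidth=4, depth=3, flops=2)
        self.assertIn(pruned, (True, False))


if __name__ == '__main__':
    unittest.main()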
Example #17
0
    def __init__(self, moduleList):
        # some definitions used during the bsv compilation process
        env = moduleList.env
        self.moduleList = moduleList

        self.hw_dir = env.Dir(moduleList.env['DEFS']['ROOT_DIR_HW'])

        self.TMP_BSC_DIR = env['DEFS']['TMP_BSC_DIR']
        synth_modules = moduleList.synthBoundaries()

        self.USE_TREE_BUILD = moduleList.getAWBParam('wrapper_gen_tool', 'USE_BUILD_TREE')

        # all_module_dirs: a list of all module directories in the build tree
        self.all_module_dirs = [self.hw_dir.Dir(moduleList.topModule.buildPath)]
        for module in synth_modules:
            if (module.buildPath != moduleList.topModule.buildPath):
                self.all_module_dirs += [self.hw_dir.Dir(module.buildPath)]

        # all_build_dirs: the build (.bsc) sub-directory of all module directories
        self.all_build_dirs = [d.Dir(self.TMP_BSC_DIR) for d in self.all_module_dirs]

        # Include iface directories
        self.all_module_dirs += iface_tool.getIfaceIncludeDirs(moduleList)
        self.all_build_dirs += iface_tool.getIfaceLibDirs(moduleList)

        # Add the top level build directory
        self.all_build_dirs += [env.Dir(self.TMP_BSC_DIR)]

        self.all_module_dirs += [self.hw_dir.Dir('include'),
                                 self.hw_dir.Dir('include/awb/provides')]

        # Full search path: all module and build directories
        self.all_lib_dirs = self.all_module_dirs + self.all_build_dirs

        all_build_dir_paths = [d.path for d in self.all_build_dirs]
        self.ALL_BUILD_DIR_PATHS = ':'.join(all_build_dir_paths)

        all_lib_dir_paths = [d.path for d in self.all_lib_dirs]
        self.ALL_LIB_DIR_PATHS = ':'.join(all_lib_dir_paths)

        # we need to annotate the module list with the
        # bluespec-provided library files. Do so here.
        bsv_tool.decorateBluespecLibraryCode(moduleList)

        self.TMP_BSC_DIR = moduleList.env['DEFS']['TMP_BSC_DIR']
        self.BUILD_LOGS_ONLY = moduleList.getAWBParam('bsv_tool', 'BUILD_LOGS_ONLY')
        self.USE_BVI = moduleList.getAWBParam('bsv_tool', 'USE_BVI')

        self.pipeline_debug = model.getBuildPipelineDebug(moduleList)

        # Should we be building in events?
        if (model.getEvents(moduleList) == 0):
            bsc_events_flag = ' -D HASIM_EVENTS_ENABLED=False '
        else:
            bsc_events_flag = ' -D HASIM_EVENTS_ENABLED=True '

        self.BSC_FLAGS = moduleList.getAWBParam('bsv_tool', 'BSC_FLAGS') + bsc_events_flag

        moduleList.env.VariantDir(self.TMP_BSC_DIR, '.', duplicate=0)
        moduleList.env['ENV']['BUILD_DIR'] = moduleList.env['DEFS']['BUILD_DIR']  # need to set the builddir for synplify


        topo = moduleList.topologicalOrderSynth()
        topo.reverse()

        # Cleaning? Wipe out module temporary state. Do this before
        # the topo pop to ensure that we don't leave garbage around at
        # the top level.
        if moduleList.env.GetOption('clean'):
            for module in topo:
                MODULE_PATH =  get_build_path(moduleList, module)
                os.system('cd '+ MODULE_PATH + '/' + self.TMP_BSC_DIR + '; rm -f *.ba *.c *.h *.sched *.log *.v *.bo *.str')

        topo.pop() # get rid of top module.

        ## Python module that generates a wrapper to connect the exposed
        ## wires of all synthesis boundaries.
        tree_builder = bsv_tool.BSVSynthTreeBuilder(self)

        ##
        ## Is this a normal build or a build in which only Bluespec dependence
        ## is computed?
        ##

        if not moduleList.isDependsBuild:
            ##
            ## Normal build.
            ##

            ##
            ## Now that the "depends-init" build is complete we can
            ## continue with accurate inter-Bluespec file dependence.
            ## This build only takes place for the first pass object
            ## code generation.  If the first pass li graph exists, it
            ## subsumes awb-style synthesis boundary generation.
            ##
            for module in topo:
                self.build_synth_boundary(moduleList, module)


            ## We are going to have a whole bunch of BA and V files coming.
            ## We don't yet know what they contain, but we do know that there
            ## will be |synth_modules| - 2 of them

            if (not 'GEN_VERILOGS' in moduleList.topModule.moduleDependency):
                moduleList.topModule.moduleDependency['GEN_VERILOGS'] = []
            if (not 'GEN_BAS' in moduleList.topModule.moduleDependency):
                moduleList.topModule.moduleDependency['GEN_BAS'] = []

            ## Having described the new build tree dependencies we can build
            ## the top module.
            self.build_synth_boundary(moduleList, moduleList.topModule)

            ## Merge all synthesis boundaries using a tree?  The tree reduces
            ## the number of connections merged in a single compilation, allowing
            ## us to support larger systems.
            if self.USE_TREE_BUILD:
                tree_builder.setupTreeBuild(moduleList, topo)

            ##
            ## Generate the global string table.  Bluespec-generated global
            ## strings are stored in files by the compiler.
            ##
            ## The global string file will be generated in the top-level
            ## .bsc directory and a link to it will be added to the
            ## top-level directory.
            ##
            all_str_src = []
            #for module in topo + [moduleList.topModule]:
            for module in moduleList.moduleList + topo + [moduleList.topModule]:
                if('STR' in module.moduleDependency):
                    all_str_src.extend(module.moduleDependency['STR'])

            if (self.BUILD_LOGS_ONLY == 0):
                bsc_str = moduleList.env.Command(self.TMP_BSC_DIR + '/' + moduleList.env['DEFS']['APM_NAME'] + '.str',
                                                 all_str_src,
                                                 [ 'cat $SOURCES > $TARGET'])
                strDep = moduleList.env.Command(moduleList.env['DEFS']['APM_NAME'] + '.str',
                                                bsc_str,
                                                [ 'ln -fs ' + self.TMP_BSC_DIR + '/`basename $TARGET` $TARGET' ])
                moduleList.topDependency += [strDep]



            if moduleList.env.GetOption('clean'):
                print 'Cleaning depends-init...'
                s = os.system('scons --clean depends-init')
        else:

            ##
            ## Dependence build.  The target of this build is "depends-init".  No
            ## Bluespec modules will be compiled in this invocation of SCons.
            ## Only .depends-bsv files will be produced.
            ##

            # We need to calculate some dependencies for the build
            # tree.  We could be clever and put this code somewhere shared
            # rather than replicate it.
            if self.USE_TREE_BUILD: 

                buildTreeDeps = {}
                buildTreeDeps['GEN_VERILOGS'] = []
                buildTreeDeps['GEN_BAS'] = []
                #This is sort of a hack.
                buildTreeDeps['WRAPPER_BSHS'] = ['awb/provides/soft_services.bsh']
                buildTreeDeps['GIVEN_BSVS'] = []
                buildTreeDeps['BA'] = []
                buildTreeDeps['STR'] = []
                buildTreeDeps['VERILOG'] = []
                buildTreeDeps['BSV_LOG'] = []
                buildTreeDeps['VERILOG_STUB'] = []

                tree_module = Module( 'build_tree', ["mkBuildTree"], moduleList.topModule.buildPath,\
                             moduleList.topModule.name,\
                             [], moduleList.topModule.name, [], buildTreeDeps, platformModule=True)

                tree_module.dependsFile = '.depends-build-tree'

                moduleList.insertModule(tree_module)
                tree_file_bo = get_build_path(moduleList, moduleList.topModule) + "/build_tree.bsv"
                # sprinkle files to get dependencies right
                bo_handle = open(tree_file_bo,'w')

                # mimic AWB/leap-configure

                bo_handle.write('//\n')
                bo_handle.write('// Synthesized compilation file for module: build_tree\n')
                bo_handle.write('//\n')
                bo_handle.write('//   This file was created by BSV.py\n')
                bo_handle.write('//\n')

                bo_handle.write('`define BUILDING_MODULE_build_tree\n')
                bo_handle.write('`include "build_tree_Wrapper.bsv"\n')

                bo_handle.close()

                # Calling generateWrapperStub will write out default _Wrapper.bsv
                # and _Log.bsv files for build tree. However, these files
                # may already exist and, in the case of build_tree_Wrapper.bsv,
                # have meaningful content.  Fortunately, generateWrapperStub
                # will not overwrite existing files.
                wrapper_gen_tool.generateWrapperStub(moduleList, tree_module)
                wrapper_gen_tool.generateAWBCompileWrapper(moduleList, tree_module)
                topo.append(tree_module)


            deps = []

            useDerived = True
            first_pass_LI_graph = wrapper_gen_tool.getFirstPassLIGraph()
            if (not first_pass_LI_graph is None):
                useDerived = False
                # we also need to parse the platform_synth file in th
                platform_synth = get_build_path(moduleList, moduleList.topModule) + "/" +  moduleList.localPlatformName + "_platform_synth.bsv"
                platform_deps = ".depends-platform"
                deps += self.compute_dependence(moduleList, moduleList.topModule, useDerived, fileName=platform_deps, targetFiles=[platform_synth])

                # If we have an LI graph, we need to construct and compile
                # several LI wrappers.  do that here.
                # include all the dependencies in the graph in the wrapper.
                li_wrappers = []
                tree_base_path = get_build_path(moduleList, moduleList.topModule)
                liGraph = LIGraph([])
                firstPassGraph = first_pass_LI_graph
                # We should ignore the 'PLATFORM_MODULE'
                liGraph.mergeModules([ module for module in getUserModules(firstPassGraph) if module.getAttribute('RESYNTHESIZE') is None])
                for module in sorted(liGraph.graph.nodes(), key=lambda module: module.name):
                    wrapper_import_path = tree_base_path + '/' + module.name + '_Wrapper.bsv'
                    li_wrappers.append(module.name + '_Wrapper.bsv')
                    wrapper_import_handle = open(wrapper_import_path, 'w')
                    wrapper_import_handle.write('import Vector::*;\n')
                    wrapper_gen_tool.generateWellKnownIncludes(wrapper_import_handle)
                    wrapper_gen_tool.generateBAImport(module, wrapper_import_handle)
                    wrapper_import_handle.close()
                    platform_deps = ".depends-" + module.name
                    deps += self.compute_dependence(moduleList, moduleList.topModule, useDerived, fileName=platform_deps, targetFiles=[wrapper_import_path])
        
            for module in topo + [moduleList.topModule]:
                # for object import builds no Wrapper code will be included. remove it.
                deps += self.compute_dependence(moduleList, module, useDerived, fileName=module.dependsFile)

            moduleList.topDependsInit += deps
Example #18
0
        "mod_code": "cmpu1004",
    },
    {
        "student_no": "c4444",
        "mod_code": "cmpu1005",
    },
]

for student in students:
    try:
        new_student = Student(**student)
        session.add(new_student)
        session.commit()
    except Exception:
        session.rollback()  # e.g. duplicate key: reset the session so later commits work
for module in modules:
    try:
        new_module = Module(**module)
        session.add(new_module)
        session.commit()
    except Exception:
        session.rollback()

for entry in student_modules:
    try:
        new_entry = StudentModule(**entry)
        session.add(new_entry)
        session.commit()
    except Exception:
        session.rollback()
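
The snippet assumes that session and the Student, Module and StudentModule models already exist. A minimal, hedged sketch of the setup that would typically precede it, assuming SQLAlchemy 1.4+; the table names, column types and the SQLite URL are illustrative assumptions, not taken from the original:

from sqlalchemy import Column, ForeignKey, String, create_engine
from sqlalchemy.orm import declarative_base, sessionmaker

Base = declarative_base()

class Student(Base):
    __tablename__ = 'student'
    student_no = Column(String, primary_key=True)

class Module(Base):
    __tablename__ = 'module'
    mod_code = Column(String, primary_key=True)

class StudentModule(Base):
    __tablename__ = 'student_module'
    student_no = Column(String, ForeignKey('student.student_no'), primary_key=True)
    mod_code = Column(String, ForeignKey('module.mod_code'), primary_key=True)

engine = create_engine('sqlite:///college.db')
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()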
Example #19
0
    def __init__(self, moduleList):
        # some definitions used during the bsv compilation process
        env = moduleList.env
        self.moduleList = moduleList

        self.hw_dir = env.Dir(moduleList.env['DEFS']['ROOT_DIR_HW'])

        self.TMP_BSC_DIR = env['DEFS']['TMP_BSC_DIR']
        synth_modules = moduleList.synthBoundaries()

        self.USE_TREE_BUILD = moduleList.getAWBParam('wrapper_gen_tool',
                                                     'USE_BUILD_TREE')

        # all_module_dirs: a list of all module directories in the build tree
        self.all_module_dirs = [
            self.hw_dir.Dir(moduleList.topModule.buildPath)
        ]
        for module in synth_modules:
            if (module.buildPath != moduleList.topModule.buildPath):
                self.all_module_dirs += [self.hw_dir.Dir(module.buildPath)]

        # all_build_dirs: the build (.bsc) sub-directory of all module directories
        self.all_build_dirs = [
            d.Dir(self.TMP_BSC_DIR) for d in self.all_module_dirs
        ]

        # Include iface directories
        self.all_module_dirs += iface_tool.getIfaceIncludeDirs(moduleList)
        self.all_build_dirs += iface_tool.getIfaceLibDirs(moduleList)

        # Add the top level build directory
        self.all_build_dirs += [env.Dir(self.TMP_BSC_DIR)]

        self.all_module_dirs += [
            self.hw_dir.Dir('include'),
            self.hw_dir.Dir('include/awb/provides')
        ]

        # Full search path: all module and build directories
        self.all_lib_dirs = self.all_module_dirs + self.all_build_dirs

        all_build_dir_paths = [d.path for d in self.all_build_dirs]
        self.ALL_BUILD_DIR_PATHS = ':'.join(all_build_dir_paths)

        all_lib_dir_paths = [d.path for d in self.all_lib_dirs]
        self.ALL_LIB_DIR_PATHS = ':'.join(all_lib_dir_paths)

        # we need to annotate the module list with the
        # bluespec-provided library files. Do so here.
        bsv_tool.decorateBluespecLibraryCode(moduleList)

        self.TMP_BSC_DIR = moduleList.env['DEFS']['TMP_BSC_DIR']
        self.BUILD_LOGS_ONLY = moduleList.getAWBParam('bsv_tool',
                                                      'BUILD_LOGS_ONLY')
        self.USE_BVI = moduleList.getAWBParam('bsv_tool', 'USE_BVI')

        self.pipeline_debug = model.getBuildPipelineDebug(moduleList)

        # Should we be building in events?
        if (model.getEvents(moduleList) == 0):
            bsc_events_flag = ' -D HASIM_EVENTS_ENABLED=False '
        else:
            bsc_events_flag = ' -D HASIM_EVENTS_ENABLED=True '

        self.BSC_FLAGS = moduleList.getAWBParam('bsv_tool',
                                                'BSC_FLAGS') + bsc_events_flag

        moduleList.env.VariantDir(self.TMP_BSC_DIR, '.', duplicate=0)
        moduleList.env['ENV']['BUILD_DIR'] = moduleList.env['DEFS'][
            'BUILD_DIR']  # need to set the builddir for synplify

        topo = moduleList.topologicalOrderSynth()
        topo.reverse()

        # Cleaning? Wipe out module temporary state. Do this before
        # the topo pop to ensure that we don't leave garbage around at
        # the top level.
        if moduleList.env.GetOption('clean'):
            for module in topo:
                MODULE_PATH = get_build_path(moduleList, module)
                os.system('cd ' + MODULE_PATH + '/' + self.TMP_BSC_DIR +
                          '; rm -f *.ba *.c *.h *.sched *.log *.v *.bo *.str')

        topo.pop()  # get rid of top module.

        ## Python module that generates a wrapper to connect the exposed
        ## wires of all synthesis boundaries.
        tree_builder = bsv_tool.BSVSynthTreeBuilder(self)

        ##
        ## Is this a normal build or a build in which only Bluespec dependence
        ## is computed?
        ##

        if not moduleList.isDependsBuild:
            ##
            ## Normal build.
            ##

            ##
            ## Now that the "depends-init" build is complete we can
            ## continue with accurate inter-Bluespec file dependence.
            ## This build only takes place for the first pass object
            ## code generation.  If the first pass li graph exists, it
            ## subsumes awb-style synthesis boundary generation.
            ##
            for module in topo:
                self.build_synth_boundary(moduleList, module)

            ## We are going to have a whole bunch of BA and V files coming.
            ## We don't yet know what they contain, but we do know that there
            ## will be |synth_modules| - 2 of them

            if (not 'GEN_VERILOGS' in moduleList.topModule.moduleDependency):
                moduleList.topModule.moduleDependency['GEN_VERILOGS'] = []
            if (not 'GEN_BAS' in moduleList.topModule.moduleDependency):
                moduleList.topModule.moduleDependency['GEN_BAS'] = []

            ## Having described the new build tree dependencies we can build
            ## the top module.
            self.build_synth_boundary(moduleList, moduleList.topModule)

            ## Merge all synthesis boundaries using a tree?  The tree reduces
            ## the number of connections merged in a single compilation, allowing
            ## us to support larger systems.
            if self.USE_TREE_BUILD:
                tree_builder.setupTreeBuild(moduleList, topo)

            ##
            ## Generate the global string table.  Bluespec-generated global
            ## strings are stored in files by the compiler.
            ##
            ## The global string file will be generated in the top-level
            ## .bsc directory and a link to it will be added to the
            ## top-level directory.
            ##
            all_str_src = []
            #for module in topo + [moduleList.topModule]:
            for module in moduleList.moduleList + topo + [
                    moduleList.topModule
            ]:
                if ('STR' in module.moduleDependency):
                    all_str_src.extend(module.moduleDependency['STR'])

            if (self.BUILD_LOGS_ONLY == 0):
                bsc_str = moduleList.env.Command(
                    self.TMP_BSC_DIR + '/' +
                    moduleList.env['DEFS']['APM_NAME'] + '.str', all_str_src,
                    ['cat $SOURCES > $TARGET'])
                strDep = moduleList.env.Command(
                    moduleList.env['DEFS']['APM_NAME'] + '.str', bsc_str, [
                        'ln -fs ' + self.TMP_BSC_DIR +
                        '/`basename $TARGET` $TARGET'
                    ])
                moduleList.topDependency += [strDep]

            if moduleList.env.GetOption('clean'):
                print 'Cleaning depends-init...'
                s = os.system('scons --clean depends-init')
        else:

            ##
            ## Dependence build.  The target of this build is "depends-init".  No
            ## Bluespec modules will be compiled in this invocation of SCons.
            ## Only .depends-bsv files will be produced.
            ##

            # We need to calculate some dependencies for the build
            # tree.  We could be clever and put this code somewhere shared
            # rather than replicate it.
            if self.USE_TREE_BUILD:

                buildTreeDeps = {}
                buildTreeDeps['GEN_VERILOGS'] = []
                buildTreeDeps['GEN_BAS'] = []
                #This is sort of a hack.
                buildTreeDeps['WRAPPER_BSHS'] = [
                    'awb/provides/soft_services.bsh'
                ]
                buildTreeDeps['GIVEN_BSVS'] = []
                buildTreeDeps['BA'] = []
                buildTreeDeps['STR'] = []
                buildTreeDeps['VERILOG'] = []
                buildTreeDeps['BSV_LOG'] = []
                buildTreeDeps['VERILOG_STUB'] = []

                tree_module = Module( 'build_tree', ["mkBuildTree"], moduleList.topModule.buildPath,\
                             moduleList.topModule.name,\
                             [], moduleList.topModule.name, [], buildTreeDeps, platformModule=True)

                tree_module.dependsFile = '.depends-build-tree'

                moduleList.insertModule(tree_module)
                tree_file_bo = get_build_path(
                    moduleList, moduleList.topModule) + "/build_tree.bsv"
                # sprinkle files to get dependencies right
                bo_handle = open(tree_file_bo, 'w')

                # mimic AWB/leap-configure

                bo_handle.write('//\n')
                bo_handle.write(
                    '// Synthesized compilation file for module: build_tree\n')
                bo_handle.write('//\n')
                bo_handle.write('//   This file was created by BSV.py\n')
                bo_handle.write('//\n')

                bo_handle.write('`define BUILDING_MODULE_build_tree\n')
                bo_handle.write('`include "build_tree_Wrapper.bsv"\n')

                bo_handle.close()

                # Calling generateWrapperStub will write out default _Wrapper.bsv
                # and _Log.bsv files for build tree. However, these files
                # may already exist and, in the case of build_tree_Wrapper.bsv,
                # have meaningful content.  Fortunately, generateWrapperStub
                # will not overwrite existing files.
                wrapper_gen_tool.generateWrapperStub(moduleList, tree_module)
                wrapper_gen_tool.generateAWBCompileWrapper(
                    moduleList, tree_module)
                topo.append(tree_module)

            deps = []

            useDerived = True
            first_pass_LI_graph = wrapper_gen_tool.getFirstPassLIGraph()
            if (not first_pass_LI_graph is None):
                useDerived = False
                # we also need to parse the platform_synth file in th
                platform_synth = get_build_path(
                    moduleList, moduleList.topModule
                ) + "/" + moduleList.localPlatformName + "_platform_synth.bsv"
                platform_deps = ".depends-platform"
                deps += self.compute_dependence(moduleList,
                                                moduleList.topModule,
                                                useDerived,
                                                fileName=platform_deps,
                                                targetFiles=[platform_synth])

                # If we have an LI graph, we need to construct and compile
                # several LI wrappers.  do that here.
                # include all the dependencies in the graph in the wrapper.
                li_wrappers = []
                tree_base_path = get_build_path(moduleList,
                                                moduleList.topModule)
                liGraph = LIGraph([])
                firstPassGraph = first_pass_LI_graph
                # We should ignore the 'PLATFORM_MODULE'
                liGraph.mergeModules([
                    module for module in getUserModules(firstPassGraph)
                    if module.getAttribute('RESYNTHESIZE') is None
                ])
                for module in sorted(liGraph.graph.nodes(),
                                     key=lambda module: module.name):
                    wrapper_import_path = tree_base_path + '/' + module.name + '_Wrapper.bsv'
                    li_wrappers.append(module.name + '_Wrapper.bsv')
                    wrapper_import_handle = open(wrapper_import_path, 'w')
                    wrapper_import_handle.write('import Vector::*;\n')
                    wrapper_gen_tool.generateWellKnownIncludes(
                        wrapper_import_handle)
                    wrapper_gen_tool.generateBAImport(module,
                                                      wrapper_import_handle)
                    wrapper_import_handle.close()
                    platform_deps = ".depends-" + module.name
                    deps += self.compute_dependence(
                        moduleList,
                        moduleList.topModule,
                        useDerived,
                        fileName=platform_deps,
                        targetFiles=[wrapper_import_path])

            for module in topo + [moduleList.topModule]:
                # for object import builds no Wrapper code will be included. remove it.
                deps += self.compute_dependence(moduleList,
                                                module,
                                                useDerived,
                                                fileName=module.dependsFile)

            moduleList.topDependsInit += deps