def export_fault_list_csv(self):
    self.FdescFile = os.path.join(self.generatedFilesDir, 'Faultlist.csv')
    FdescTable = Table('Faultlist', self.get_fdesc_labels())
    for idx in range(len(self.fault_list)):
        for row in self.faultdesc_format_str(idx):
            FdescTable.add_row(row)
    FdescTable.to_csv(';', True, self.FdescFile)
    print('Fault List exported to: {0}'.format(self.FdescFile))
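
#For reference, a minimal stand-in (standard library only, illustrative column names) for the
#semicolon-separated layout that the Table(...).to_csv(';', True, path) call above is expected
#to produce: one header row followed by one row per fault descriptor.
import csv

def _write_fault_list_sketch(path, labels, rows):
    with open(path, 'w') as f:
        writer = csv.writer(f, delimiter=';')
        writer.writerow(labels)   # header row, e.g. ['ID', 'FAR', 'word', 'bit']
        writer.writerows(rows)    # one row per fault descriptor

#Example: _write_fault_list_sketch('Faultlist.csv', ['ID', 'FAR', 'word', 'bit'], [[0, '00400100', 2, 6]])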
def ExportProfilingStatistics(LutMapList, fname):
    #Per frame: FAR;MeanActivityTime;FailureRate
    print('creating perframe dict')
    perframe_experiments = dict()
    for lut in LutMapList:
        for i in range(len(lut['FailureModeEmul'])):
            if len(lut['FailureModeEmul'][i]) > 0:
                for j in range(len(lut['FailureModeEmul'][i])):
                    if len(lut['Actime']) > 0 and j < len(lut['Actime'][i]) and lut['Actime'][i][j] >= 0 and lut['FailureModeEmul'][i][j] >= 0:
                        FAR = lut['globalmap'][i][j][0]
                        if FAR not in perframe_experiments:
                            perframe_experiments[FAR] = []
                        perframe_experiments[FAR].append((lut['Actime'][i][j], lut['FailureModeEmul'][i][j]))
    res = []
    for k, v in perframe_experiments.items():
        actime = [i[0] for i in v]
        failures = [i[1] for i in v]
        #Normalised by the full frame size (101 words x 32 bits on 7-series devices) rather than
        #by the number of experiments per frame (the commented-out variant below uses the latter):
        #res.append((k, sum(actime)/len(actime), 100.0*float(sum(failures))/len(failures), len(actime)))
        res.append((k, sum(actime) / (101 * 32), 100.0 * float(sum(failures)) / (101 * 32), len(actime)))
    T = Table('PerFrameRes', ['FAR', 'MeanActime', 'FailureRate', 'items'])
    for i in res:
        T.add_row(map(str, [i[0], i[1], i[2], i[3]]))
    with open(fname, 'w') as f:
        f.write(T.to_csv())
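
#A minimal, self-contained sketch (hypothetical data and helper name) of the per-frame
#aggregation performed by ExportProfilingStatistics above: experiments are grouped by frame
#address (FAR), then a mean activity time and a failure percentage are derived per frame,
#using the same 101*32 divisor (the 7-series frame size in bits).
def _perframe_summary_sketch(experiments):
    """experiments: list of (FAR, actime, failed) tuples."""
    perframe = dict()
    for far, actime, failed in experiments:
        perframe.setdefault(far, []).append((actime, failed))
    summary = []
    for far, items in sorted(perframe.items()):
        actimes = [a for a, _ in items]
        failures = [f for _, f in items]
        summary.append((far, sum(actimes) / (101 * 32), 100.0 * sum(failures) / (101 * 32), len(items)))
    return summary

#Example: _perframe_summary_sketch([(0x00400100, 12.5, 1), (0x00400100, 3.0, 0)])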
def GenerateFaultload(self):
    if self.DutScope != '' and not self.DutScope.endswith('/'):
        self.DutScope += '/'
    #Step 1: Build the list of frame addresses (obtained by running InjApp in profiling mode)
    FarList = LoadFarList(self.Input_FarListFile)
    #Step 2: Build the list of frame descriptors for the complete bitstream (*.bit or *.bin)
    BIN_FrameList = bitstream_to_FrameList(self.Input_BinstreamFile, FarList)
    #Mark CLB columns: a major column consisting of exactly 36 minor frames is treated as CLB
    i = 0
    while i < len(BIN_FrameList):
        if BIN_FrameList[i].Minor == 0:
            buf = BIN_FrameList[i].Major
            cnt = 0
            while (i + cnt < len(BIN_FrameList)) and BIN_FrameList[i + cnt].Major == buf:
                cnt += 1
            if cnt == 36:
                for j in range(cnt):
                    BIN_FrameList[i].type = "CLB"
                    i += 1
            else:
                i += 1
        else:
            i += 1

    area_filter = []
    if self.PblockCoord:
        area_filter = get_pblock_mjr_coord(self.DevicePart, self.PblockCoord[0], self.PblockCoord[1], self.PblockCoord[2], self.PblockCoord[3])

    if self.target_logic == 'type0' or self.target_logic == 'all' or (self.target_logic == 'lut'):  # and not self.CustomLutMask):
        #Step 4: Build the list of frame descriptors from EBC+EBD (essential bits)
        EBC_FrameList = EBC_to_FrameList(self.Input_EBCFile, self.Input_EBDFile, FarList)
        #Step 5: Compare BIN to EBC and, if no mismatches found, copy essential bits mask to BIN
        mismatches = 0
        for i in range(len(EBC_FrameList)):
            for k in range(FrameSize):
                if EBC_FrameList[i].data[k] != BIN_FrameList[i].data[k]:
                    if self.verbosity > 0:
                        self.logfile.write('Check EBC vs BIT: mismatch at Frame[{0:08x}]: Block={1:5d}, Top={2:5d}, Row={3:5d}, Major={4:5d}, Minor={5:5d}\n'.format(BIN_FrameList[i].GetFar(), BIN_FrameList[i].BlockType, BIN_FrameList[i].Top, BIN_FrameList[i].Row, BIN_FrameList[i].Major, BIN_FrameList[i].Minor))
                    mismatches += 1
        if mismatches == 0:
            self.logfile.write('\nCheck EBC vs BIT: Complete Match\n')
        else:
            self.logfile.write('Check EBC vs BIT: Mismatches Count = {0:d}\n'.format(mismatches))
        if mismatches == 0:
            for i in range(len(EBC_FrameList)):
                #if (self.target_logic in ['type0', 'all']) or (self.target_logic=='lut' and BIN_FrameList[i].Minor in [26,27,28,29, 32,33,34,35]):
                if (self.target_logic in ['type0', 'all']) or (self.target_logic == 'lut' and BIN_FrameList[i].type == "CLB" and BIN_FrameList[i].Minor in [26, 27, 28, 29, 32, 33, 34, 35]):
                    if (not area_filter) or (area_filter and (BIN_FrameList[i].Top, BIN_FrameList[i].Row, BIN_FrameList[i].Major) in area_filter):
                        BIN_FrameList[i].mask = EBC_FrameList[i].mask

    XilinxLutBitCnt = 0
    for frame in BIN_FrameList:
        stat = frame.get_stat()
        XilinxLutBitCnt += stat['TotalBits']
    print('Essential bits COUNT (initial): {0}'.format(XilinxLutBitCnt))

    if self.CustomLutMask:
        LutDescTab = Table('LutMap')
        LutDescTab.build_from_csv(os.path.join(self.targetDir, 'LUTMAP.csv'))
        print('Mapping LUTs to bitstream')
        LutMapList = MapLutToBitstream(LutDescTab, BIN_FrameList, self.DutScope)
        with open(self.LutMapFile, 'w') as f:
            f.write(LutListToTable(LutMapList).to_csv())
        if self.target_logic in ['type0', 'all', 'lut']:
            for i in BIN_FrameList:
                if (not area_filter) or (area_filter and (i.Top, i.Row, i.Major) in area_filter):
                    if i.Minor in [26, 27, 28, 29, 32, 33, 34, 35]:  # minor frames carrying LUT content
                        if i.custom_mask == []:
                            i.mask = [0x0] * FrameSize
                        else:
                            for k in range(FrameSize):
                                i.mask[k] = i.custom_mask[k]  # (i.mask[k] ^ i.custom_mask[k]) & i.mask[k]
        XilinxLutBitCnt = 0
        for frame in BIN_FrameList:
            stat = frame.get_stat()
            XilinxLutBitCnt += stat['TotalBits']
        print('Essential bits COUNT (after LUT mapping): {0}'.format(XilinxLutBitCnt))

        if self.Profiling and self.DAVOS_Config is not None:
            if not os.path.exists(self.FaultListFile):
                print('Profiling LUTs switching activity')
                self.ProfilingResult = Estimate_LUT_switching_activity(LutMapList, self.DAVOS_Config)
                ExportFaultList(self.ProfilingResult, self.FaultListFile)
            else:
                self.ProfilingResult = LoadFaultList(self.FaultListFile)  # load from file
            with open(self.LutMapFile, 'w') as f:
                f.write(LutListToTable(LutMapList).to_csv())
            FrameDict = dict()
            for frame in BIN_FrameList:
                FrameDict[frame.GetFar()] = frame
            #Exclude from the mask the LUT bits that never toggled during profiling (Actime == 0)
            for item in self.ProfilingResult:
                if item['Actime'] == 0:
                    FrameDict[item['BitstreamCoordinates'][0]].mask[item['BitstreamCoordinates'][1]] &= (0xFFFFFFFF ^ (1 << item['BitstreamCoordinates'][2]))
                    #FrameDict[item['BitstreamCoordinates'][0]].mask[item['BitstreamCoordinates'][1]] |= 1<<item['BitstreamCoordinates'][2]
            XilinxLutBitCnt = 0
            for frame in BIN_FrameList:
                stat = frame.get_stat()
                XilinxLutBitCnt += stat['TotalBits']
            print('Essential bits COUNT (after profiling): {0}'.format(XilinxLutBitCnt))

    #Step 3: append targets from the LL file (FF, BRAM, and distributed LUTRAM)
    FFMap = []
    BramMap = []    #[BramNode, BramBit, FAR, word, bit, data]
    LutramMap = []  #[LutramNode, Bit, FAR, word, bit, data]
    RecoveryRamLocations = []
    FAR_CLB = set()
    T = Table('Cells')
    T.build_from_csv(self.Input_CellDescFile)
    for node in self.RecoveryNodeNames:
        #print("Locations for {}".format(node))
        for i in T.query({'Node': node, 'BellType': 'RAMB'}):
            RecoveryRamLocations.append(i['CellLocation'])
    BramNodes = dict()
    for i in T.query({'BellType': 'RAMB'}):
        BramNodes[i['CellLocation']] = i['Node']
    LutRamNodes = dict()
    for i in T.query({'CellType': 'DMEM.dram'}):
        LutRamNodes[(i['CellLocation'], re.findall(r"\.([A|B|C|D]+[0-9]+)", i['BEL'])[0])] = i['Node']
    #print("Recovery Ram Locations: {}".format(str(RecoveryRamLocations)))
    self.logfile.write('Recovery RAM Location: ' + str(RecoveryRamLocations) + '\n')

    #Set mask=1 for all bits of used BRAM (from *.ll file)
    #and build the FAR recovery list: include all FARs from the *.ll file containing bits
    #of the selected design units (e.g. a ROM inferred on BRAM)
    FARmask = dict()
    RecoveryFrames = set()
    CheckpointFrames = set()
    BinDataDict = dict()
    for i in BIN_FrameList:
        BinDataDict[i.GetFar()] = i
    with open(self.Input_LLFile, 'r') as f:
        for line in f:
            matchDesc, t = re.search(ram_search_ptn, line), 1
            if not matchDesc:
                matchDesc, t = re.search(ff_search_ptn, line), 2
                if not matchDesc:
                    matchDesc, t = re.search(lutram_search_ptn, line), 3
            if matchDesc:
                FAR = int(matchDesc.group(1), 16)
                offset = int(matchDesc.group(2))
                block = matchDesc.group(3)
                if t == 1:
                    nodepath = BramNodes[block]
                elif t == 2:
                    nodepath = matchDesc.group(5)
                elif t == 3:
                    bel = '{0}{1}'.format(matchDesc.group(4), str(5) if int(matchDesc.group(5)) % 2 == 0 else str(6))
                    if (block, bel) in LutRamNodes:
                        nodepath = LutRamNodes[(block, bel)]
                    else:
                        continue
                if t == 1 and (block in RecoveryRamLocations):
                    RecoveryFrames.add(FAR)
                if nodepath.startswith(self.DutScope) or self.DutScope == '':
                    if t in [2] and nodepath.startswith(self.DutScope):
                        CheckpointFrames.add(FAR)
                    if (t == 1 and self.target_logic == 'bram') or (t == 2 and self.target_logic in ['ff', 'type0', 'ff+lutram']) or self.target_logic == 'all' or (t == 3 and self.target_logic in ['lutram', 'ff+lutram']):
                        word, bit = offset // 32, offset % 32
                        if t == 1:
                            if matchDesc.group(4) in ['BIT', 'PARBIT']:
                                BramMap.append((nodepath, int(matchDesc.group(5)), FAR, word, bit, (BinDataDict[FAR].data[word] >> bit) & 0x1))
                        if t == 2:
                            FFMap.append((nodepath, 0, FAR, word, bit, (BinDataDict[FAR].data[word] >> bit) & 0x1))
                        if t == 3:
                            LutramMap.append((nodepath, int(matchDesc.group(5)), FAR, word, bit, (BinDataDict[FAR].data[word] >> bit) & 0x1))
                        if FAR in FARmask:
                            desc = FARmask[FAR]
                        else:
                            desc = FrameDesc(FAR)
                            desc.mask = [0] * FrameSize
                            FARmask[FAR] = desc
                        desc.mask[word] |= 1 << bit

    if len(FFMap) > 0:
        Tab = Table('FFMap', ['Node', 'Case', 'FAR', 'word', 'bit', 'data'])
        for i in FFMap:
            CheckpointFrames.add(i[2])
            Tab.add_row([str(i[0]), str(i[1]), str(i[2]), str(i[3]), str(i[4]), str(i[5])])
        Tab.to_csv(';', True, os.path.join(self.targetDir, 'FFMapList.csv'))
    if len(BramMap) > 0:
        Tab = Table('BramMap', ['Node', 'Case', 'FAR', 'word', 'bit', 'data'])
        for i in BramMap:
            CheckpointFrames.add(i[2])
            Tab.add_row([str(i[0]), str(i[1]), str(i[2]), str(i[3]), str(i[4]), str(i[5])])
        Tab.to_csv(';', True, os.path.join(self.targetDir, 'BramMapList.csv'))
    if len(LutramMap) > 0:
        Tab = Table('LutramMap', ['Node', 'Case', 'FAR', 'word', 'bit', 'data'])
        for i in LutramMap:
            CheckpointFrames.add(i[2])
            Tab.add_row([str(i[0]), str(i[1]), str(i[2]), str(i[3]), str(i[4]), str(i[5])])
        with open(os.path.join(self.targetDir, 'LutramMapList.csv'), 'w') as f:
            f.write(Tab.to_csv())

    #Merge the FARmask bits into the matching bitstream frames
    for key in sorted(FARmask):
        for i in BIN_FrameList:
            if i.GetFar() == key:
                if (not area_filter) or (area_filter and (i.Top, i.Row, i.Major) in area_filter):
                    for k in range(0, len(i.mask)):
                        i.mask[k] |= FARmask[key].mask[k]
                    if self.verbosity > 2:
                        self.logfile.write("{0:08x} : {1:s}\n".format(i.GetFar(), ' '.join(['{0:08x}'.format(x) for x in i.mask])))
                break

    self.logfile.write('Recovery FAR: {}\n'.format(",".join(["{0:08x}".format(i) for i in sorted(list(RecoveryFrames))])))
    #Export the resulting descriptor
    #with open(os.path.join(self.targetDir, 'BitLogBram.txt'), 'w') as f:
    #    BramMap.sort()
    #    f.write('\n'.join([str(i) for i in BramMap]))
    #with open(os.path.join(self.targetDir, 'BitLog.txt'), 'w') as f:
    #    for i in BIN_FrameList:
    #        if all(v == 0 for v in i.mask):
    #            continue
    #        else:
    #            f.write(i.to_string(2) + '\n\n')
    export_DescriptorFile(self.Output_FrameDescFile, BIN_FrameList, RecoveryFrames, CheckpointFrames)
    populationsize = 0
    for i in range(0, 9):
        self.EssentialBitsPerBlockType.append(0)
    for i in BIN_FrameList:
        populationsize += i.EssentialBitsCount
        self.EssentialBitsPerBlockType[i.BlockType] += i.EssentialBitsCount
        #self.logfile.write('FAR: {0:08x} = {1:5d} Essential bits\n'.format(i.GetFar(), i.EssentialBitsCount))
    self.logfile.write('Population Size: {0:10d}\n'.format(populationsize))
    self.logfile.write('CheckpointFrames = ' + ', '.join(['{0:08x}'.format(int(x)) for x in CheckpointFrames]))
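
#Illustrative helper (hypothetical, not part of the class above): how a bit offset taken from
#a Xilinx *.ll logic-location entry maps onto a (word, bit) position within a configuration
#frame (101 x 32-bit words on 7-series devices), and how that bit is raised in a frame mask,
#mirroring the FARmask update in GenerateFaultload.
def _set_mask_bit_sketch(mask, offset):
    word, bit = offset // 32, offset % 32   # 32 configuration bits per frame word
    mask[word] |= 1 << bit
    return word, bit

#Example: _set_mask_bit_sketch([0] * 101, 70) returns (2, 6) and sets bit 6 of word 2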
def build_FFI_report(DavosConfig, ExportLutCsv=False):
    datamodel = DataModel()
    if not os.path.exists(DavosConfig.report_dir):
        os.makedirs(DavosConfig.report_dir)
    datamodel.ConnectDatabase(DavosConfig.get_DBfilepath(False), DavosConfig.get_DBfilepath(True))
    datamodel.RestoreHDLModels(DavosConfig.parconf)
    datamodel.RestoreEntity(DataDescriptors.InjTarget)
    datamodel.SaveHdlModels()
    for conf in DavosConfig.parconf:
        model = datamodel.GetHdlModel(conf.label)
        Tab = Table('LutMapList')
        Tab.build_from_csv(os.path.join(conf.work_dir, 'LutMapList.csv'))
        LutMapList = TableToLutList(Tab)
        if ExportLutCsv:
            AggregateLutResults(LutMapList, os.path.join(conf.work_dir, './log/Injector.log'))
            with open(os.path.join(conf.work_dir, 'LutResult.csv'), 'w') as f:
                zTab = LutListToTable(LutMapList, True, False)
                f.write(zTab.to_csv())
        if DavosConfig.FFIConfig.profiling:
            ExportProfilingStatistics(LutMapList, os.path.join(conf.work_dir, 'PerFrame.csv'))
        SummaryTable = Table(model.Label, ['ID', 'Target', 'FAR', 'word', 'bit', 'Actime', 'FailureMode'])
        lut_dict = build_lut_coord_dict(LutMapList, '')
        ff_dict = load_Map_dict(os.path.join(conf.work_dir, 'FFMapList.csv'))
        ram_dict = load_Map_dict(os.path.join(conf.work_dir, 'BramMapList.csv'))
        lutram_dict = load_Map_dict(os.path.join(conf.work_dir, 'LutramMapList.csv'))
        #ff_dict, ram_dict = build_regmem_coord_dict(os.path.join(conf.work_dir, 'Bitstream.ll'), os.path.join(conf.work_dir, 'Bels.csv'))
        ExpDescIdCnt = datamodel.GetMaxKey(DataDescriptors.InjectionExp) + 1
        with open(os.path.join(conf.work_dir, './log/Injector.log'), 'rU') as f:
            content = f.readlines()
        for l in content:
            match = re.search(injlog_item_ptn, l)
            if not match:
                continue
            #Resolve the injection coordinates (FAR, word, bit) to a design node
            coord = (int(match.group(2)), int(match.group(3)), int(match.group(4)))
            if coord in lut_dict:
                target = datamodel.GetOrAppendTarget(lut_dict[coord][0][0]['name'], 'LUT', '{}/{}'.format(lut_dict[coord][0][1], lut_dict[coord][0][2]))
            elif coord in ff_dict:
                target = datamodel.GetOrAppendTarget(ff_dict[coord]['node'], 'FF', ff_dict[coord]['case'])
            elif coord in ram_dict:
                target = datamodel.GetOrAppendTarget(ram_dict[coord]['node'], 'BRAM', ram_dict[coord]['case'])
            elif coord in lutram_dict:
                target = datamodel.GetOrAppendTarget(lutram_dict[coord]['node'], 'LUTRAM', lutram_dict[coord]['case'])
            else:
                target = datamodel.GetOrAppendTarget('U:{0:08x}:{1:03d}:{2:02d}'.format(int(match.group(2)), int(match.group(3)), int(match.group(4))), 'TYPE0', '')
            InjDesc = InjectionDescriptor()
            InjDesc.InjectionTime = float(match.group(5))
            #Failure modes: M = masked, L = latent, C = SDC, S = signaled, X = unknown
            fmode = match.group(6).lower()
            if fmode.find('masked') >= 0:
                InjDesc.FailureMode = 'M'
            elif fmode.find('latent') >= 0:
                InjDesc.FailureMode = 'L'
            elif fmode.find('sdc') >= 0:
                InjDesc.FailureMode = 'C'
            elif fmode.find('signaled') >= 0:
                InjDesc.FailureMode = 'S'
            else:
                InjDesc.FailureMode = 'X'
            InjDesc.ID = ExpDescIdCnt
            InjDesc.ModelID = model.ID
            InjDesc.TargetID = target.ID
            InjDesc.FaultModel = 'BitFlip'
            InjDesc.ForcedValue = ''
            InjDesc.InjectionDuration = float(0)
            InjDesc.ObservationTime = float(0)
            InjDesc.Node = target.NodeFullPath
            InjDesc.InjCase = target.InjectionCase
            InjDesc.Status = 'F'
            InjDesc.FaultToFailureLatency = float(0)
            InjDesc.ErrorCount = 0
            InjDesc.Dumpfile = ''
            datamodel.LaunchedInjExp_dict[InjDesc.ID] = InjDesc
            actime = float(-1.0) if match.group(8) is None else float(match.group(8))
            SummaryTable.add_row(map(str, [int(match.group(1)), target.NodeFullPath, coord[0], coord[1], coord[2], actime, InjDesc.FailureMode]))
            ExpDescIdCnt += 1
            if ExpDescIdCnt % 100 == 0:
                sys.stdout.write('Processed report lines: {0}\r'.format(str(ExpDescIdCnt)))
        SummaryTable.to_csv(';', True, os.path.join(DavosConfig.report_dir, '{0}.csv'.format(model.Label)))
        T = SummaryTable.to_html_table('SEU_LUT_Details')
        T.to_file(os.path.join(DavosConfig.report_dir, '{0}.html'.format(model.Label)))
    datamodel.SaveHdlModels()
    datamodel.SaveTargets()
    datamodel.SaveInjections()
    build_report(DavosConfig, DavosConfig.toolconf, datamodel)
    datamodel.SyncAndDisconnectDB()
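
#Hypothetical stand-alone version of the failure-mode classification applied while parsing the
#injector log in build_FFI_report: the textual verdict is reduced to a single letter
#(M = masked, L = latent, C = silent data corruption, S = signaled, X = unknown).
def _classify_failure_mode_sketch(verdict):
    v = verdict.lower()
    if 'masked' in v:
        return 'M'
    if 'latent' in v:
        return 'L'
    if 'sdc' in v:
        return 'C'
    if 'signaled' in v:
        return 'S'
    return 'X'

#Example: _classify_failure_mode_sketch('Masked') == 'M'; _classify_failure_mode_sketch('SDC') == 'C'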