def build_regmem_coord_dict(LLfname, Beldescfname):
    ff_coord_dict, ram_coord_dict = {}, {}
    ramblocation_dict = {}
    T = Table('Bels')
    T.build_from_csv(Beldescfname)
    for i in T.query({'BellType': 'RAMB'}):
        ramblocation_dict[i['CellLocation']] = i['Node']
    with open(LLfname, 'r') as f:
        for line in f:
            matchDesc, t = re.search(ram_search_ptn, line), 1
            if not matchDesc:
                matchDesc, t = re.search(ff_search_ptn, line), 2
            if matchDesc:
                FAR = int(matchDesc.group(1), 16)
                offset = int(matchDesc.group(2))
                block = matchDesc.group(3)
                word, bit = offset // 32, offset % 32   # integer word/bit indices within the frame
                if t == 1:
                    ram_coord_dict[(FAR, word, bit)] = {
                        'node': ramblocation_dict[matchDesc.group(3)],
                        'case': '{}/{}'.format(matchDesc.group(4), matchDesc.group(5))
                    }
                elif t == 2:
                    ff_coord_dict[(FAR, word, bit)] = {
                        'node': matchDesc.group(5),
                        'case': ''
                    }
    return (ff_coord_dict, ram_coord_dict)
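

# Usage sketch (illustrative, not part of the original module): builds the FF/BRAM
# coordinate dictionaries from a logic-location (*.ll) file and a BEL description CSV,
# then probes one hypothetical (FAR, word, bit) coordinate. The file names follow the
# ones referenced in build_FFI_report() below; the probe coordinate itself is made up.
def _example_probe_regmem(work_dir):
    ff_dict, ram_dict = build_regmem_coord_dict(
        os.path.join(work_dir, 'Bitstream.ll'),   # *.ll file of the implemented design
        os.path.join(work_dir, 'Bels.csv'))       # BEL table with BellType/CellLocation/Node columns
    probe = (0x00400100, 3, 17)                   # hypothetical FAR / word / bit
    if probe in ff_dict:
        print('FF   bit {0} -> {1}'.format(probe, ff_dict[probe]['node']))
    elif probe in ram_dict:
        print('BRAM bit {0} -> {1}'.format(probe, ram_dict[probe]['node']))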
def ExportProfilingStatistics(LutMapList, fname):
    #Per frame: FAR;MeanActivityTime;FailureRate
    print('creating perframe dict')
    perframe_experiments = dict()
    for lut in LutMapList:
        for i in range(len(lut['FailureModeEmul'])):
            if len(lut['FailureModeEmul'][i]) > 0:
                for j in range(len(lut['FailureModeEmul'][i])):
                    if (len(lut['Actime']) > 0 and j < len(lut['Actime'][i])
                            and lut['Actime'][i][j] >= 0 and lut['FailureModeEmul'][i][j] >= 0):
                        FAR = lut['globalmap'][i][j][0]
                        if FAR not in perframe_experiments:
                            perframe_experiments[FAR] = []
                        perframe_experiments[FAR].append((lut['Actime'][i][j], lut['FailureModeEmul'][i][j]))
    res = []
    for k, v in perframe_experiments.items():
        actime = [i[0] for i in v]
        failures = [i[1] for i in v]
        #res.append( (k, sum(actime)/len(actime), 100.0*float(sum(failures))/len(failures), len(actime)) )
        #Normalize by the full frame size (a 7-series frame is 101 words x 32 bits)
        #rather than by the number of collected items
        res.append((k, sum(actime) / (101 * 32), 100.0 * float(sum(failures)) / (101 * 32), len(actime)))
    T = Table('PerFrameRes', ['FAR', 'MeanActime', 'FailureRate', 'items'])
    for i in res:
        T.add_row(list(map(str, [i[0], i[1], i[2], i[3]])))
    with open(fname, 'w') as f:
        f.write(T.to_csv())
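

# Usage sketch (illustrative): restores the LUT map produced during faultload generation
# and writes the per-frame activity/failure-rate statistics next to it. 'LutMapList.csv',
# TableToLutList() and 'PerFrame.csv' are the names used by build_FFI_report() below.
def _example_perframe_stats(work_dir):
    Tab = Table('LutMapList')
    Tab.build_from_csv(os.path.join(work_dir, 'LutMapList.csv'))
    ExportProfilingStatistics(TableToLutList(Tab), os.path.join(work_dir, 'PerFrame.csv'))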
def build_lut_coord_dict(LutMapList, SimResFile=''):
    simresdict = {}
    if SimResFile != '':
        print('Processing simulation results: {0}'.format(SimResFile))
        SimRes = Table('SimRes')
        SimRes.build_from_csv(SimResFile)
        node_ind = SimRes.labels.index('Node')
        case_ind = SimRes.labels.index('InjCase')
        res_ind = SimRes.labels.index('FailureMode')
        for i in range(SimRes.rownum()):
            node = SimRes.get(i, node_ind)
            case = int(re.findall('[0-9]+', SimRes.get(i, case_ind))[0])
            simresdict[(node, case)] = SimRes.get(i, res_ind).upper()
    coord_dict = {}
    for lut in LutMapList:
        if 'Multiplicity' not in lut:
            lut['Multiplicity'] = len(lut['globalmap'][0])
        if ('FailureModeEmul' not in lut) or (lut['FailureModeEmul'] == []):
            lut['FailureModeEmul'] = [[-1] * lut['Multiplicity'] for i in range(len(lut['globalmap']))]
        if 'FailureModeSim' not in lut:
            lut['FailureModeSim'] = []  # defensive init: appended to below when simulation results are supplied
        for i in range(len(lut['globalmap'])):
            if (lut['simnode'], i) in simresdict:
                lut['FailureModeSim'].append(simresdict[(lut['simnode'], i)])
            for j in range(len(lut['globalmap'][i])):
                #several logic LUTs can use the same memory cell (LUT6_2 bel = LUT6 cell + LUT5 cell)
                if not lut['globalmap'][i][j] in coord_dict:
                    coord_dict[lut['globalmap'][i][j]] = []
                coord_dict[lut['globalmap'][i][j]].append((lut, i, j))
    return coord_dict
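

# Usage sketch (illustrative): maps one bitstream coordinate back to the LUT(s) that own
# it. Each coord_dict entry is a list of (lut, i, j) tuples because several logical LUTs
# (e.g. the LUT6/LUT5 pair of a LUT6_2) may share the same memory cell; the 'name' key is
# the one consumed by build_FFI_report() below.
def _example_lookup_lut(LutMapList, far, word, bit):
    coord_dict = build_lut_coord_dict(LutMapList)
    for lut, i, j in coord_dict.get((far, word, bit), []):
        print('{0} : globalmap[{1}][{2}]'.format(lut['name'], i, j))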
def load_Map_dict(fname):
    res = {}
    if os.path.exists(fname):
        Tab = Table('MapList')
        Tab.build_from_csv(fname)
        for i in range(Tab.rownum()):
            res[(int(Tab.getByLabel('FAR', i)),
                 int(Tab.getByLabel('word', i)),
                 int(Tab.getByLabel('bit', i)))] = {
                'node': Tab.getByLabel('Node', i),
                'case': Tab.getByLabel('Case', i)
            }
    return res
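

# Usage sketch (illustrative): restores the FF map exported by GenerateFaultload() and
# dumps the design node behind every mapped bitstream bit. 'FFMapList.csv' is the file
# name used elsewhere in this module.
def _example_dump_ff_map(work_dir):
    ff_dict = load_Map_dict(os.path.join(work_dir, 'FFMapList.csv'))
    for (far, word, bit), item in sorted(ff_dict.items()):
        print('{0:08x}:{1:03d}:{2:02d} -> {3}'.format(far, word, bit, item['node']))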
def export_fault_list_csv(self):
    self.FdescFile = os.path.join(self.generatedFilesDir, 'Faultlist.csv')
    FdescTable = Table('Faultlist', self.get_fdesc_labels())
    for idx in range(len(self.fault_list)):
        for row in self.faultdesc_format_str(idx):
            FdescTable.add_row(row)
    FdescTable.to_csv(';', True, self.FdescFile)
    print('Fault List exported to: {0}'.format(self.FdescFile))
def GenerateFaultload(self):
    if not self.DutScope == '' and not self.DutScope.endswith('/'):
        self.DutScope += '/'
    #Step 1: Build the list of frame addresses (obtained by running InjApp in profiling mode)
    FarList = LoadFarList(self.Input_FarListFile)
    #Step 2: Build the list of frame descriptors for complete bitstream (*.bit or *.bin)
    BIN_FrameList = bitstream_to_FrameList(self.Input_BinstreamFile, FarList)
    #Mark columns of 36 consecutive frames sharing the same Major address as CLB columns
    i = 0
    while i < len(BIN_FrameList):
        if BIN_FrameList[i].Minor == 0:
            buf = BIN_FrameList[i].Major
            cnt = 0
            while (i + cnt < len(BIN_FrameList)) and BIN_FrameList[i + cnt].Major == buf:
                cnt += 1
            if cnt == 36:
                for j in range(cnt):
                    BIN_FrameList[i].type = "CLB"
                    i += 1
            else:
                i += 1
        else:
            i += 1
    area_filter = []
    if self.PblockCoord:
        area_filter = get_pblock_mjr_coord(self.DevicePart, self.PblockCoord[0], self.PblockCoord[1], self.PblockCoord[2], self.PblockCoord[3])
    if self.target_logic == 'type0' or self.target_logic == 'all' or (self.target_logic == 'lut'):  # and not self.CustomLutMask):
        #Step 4: Build the list of frame descriptors from EBC+EBD (essential bits)
        EBC_FrameList = EBC_to_FrameList(self.Input_EBCFile, self.Input_EBDFile, FarList)
        #Step 5: Compare BIN to EBC and, if no mismatches found, copy essential bits mask to BIN
        mismatches = 0
        for i in range(len(EBC_FrameList)):
            for k in range(FrameSize):
                if EBC_FrameList[i].data[k] != BIN_FrameList[i].data[k]:
                    if self.verbosity > 0:
                        self.logfile.write('Check EBC vs BIT: mismatch at Frame[{0:08x}]: Block={1:5d}, Top={2:5d}, Row={3:5d}, Major={4:5d}, Minor={5:5d}\n'.format(BIN_FrameList[i].GetFar(), BIN_FrameList[i].BlockType, BIN_FrameList[i].Top, BIN_FrameList[i].Row, BIN_FrameList[i].Major, BIN_FrameList[i].Minor))
                    mismatches += 1
        if mismatches == 0:
            self.logfile.write('\nCheck EBC vs BIT: Complete Match\n')
        else:
            self.logfile.write('Check EBC vs BIT: Mismatches Count = {0:d}\n'.format(mismatches))
        if mismatches == 0:
            for i in range(len(EBC_FrameList)):
                #if (self.target_logic in ['type0', 'all']) or (self.target_logic=='lut' and BIN_FrameList[i].Minor in [26,27,28,29, 32,33,34,35]):
                if (self.target_logic in ['type0', 'all']) or (self.target_logic == 'lut' and BIN_FrameList[i].type == "CLB" and BIN_FrameList[i].Minor in [26, 27, 28, 29, 32, 33, 34, 35]):
                    if (not area_filter) or (area_filter and (BIN_FrameList[i].Top, BIN_FrameList[i].Row, BIN_FrameList[i].Major) in area_filter):
                        BIN_FrameList[i].mask = EBC_FrameList[i].mask
    XilinxLutBitCnt = 0
    for frame in BIN_FrameList:
        stat = frame.get_stat()
        XilinxLutBitCnt += stat['TotalBits']
    print('Essential bits COUNT (initial): {0}'.format(XilinxLutBitCnt))
    if self.CustomLutMask:
        LutDescTab = Table('LutMap')
        LutDescTab.build_from_csv(os.path.join(self.targetDir, 'LUTMAP.csv'))
        print('Mapping LUTs to bitstream')
        LutMapList = MapLutToBitstream(LutDescTab, BIN_FrameList, self.DutScope)
        with open(self.LutMapFile, 'w') as f:
            f.write(LutListToTable(LutMapList).to_csv())
        if self.target_logic in ['type0', 'all', 'lut']:
            for i in BIN_FrameList:
                if (not area_filter) or (area_filter and (i.Top, i.Row, i.Major) in area_filter):
                    if i.Minor in [26, 27, 28, 29, 32, 33, 34, 35]:
                        if i.custom_mask == []:
                            i.mask = [0x0]*FrameSize
                        else:
                            for k in range(FrameSize):
                                i.mask[k] = i.custom_mask[k]    #(i.mask[k] ^ i.custom_mask[k]) & i.mask[k]
        XilinxLutBitCnt = 0
        for frame in BIN_FrameList:
            stat = frame.get_stat()
            XilinxLutBitCnt += stat['TotalBits']
        print('Essential bits COUNT (after LUT mapping): {0}'.format(XilinxLutBitCnt))
        if self.Profiling and self.DAVOS_Config != None:
            if not os.path.exists(self.FaultListFile):
                print('Profiling LUTs switching activity')
                self.ProfilingResult = Estimate_LUT_switching_activity(LutMapList, self.DAVOS_Config)
                ExportFaultList(self.ProfilingResult, self.FaultListFile)
            else:
                self.ProfilingResult = LoadFaultList(self.FaultListFile)    #load from file
            with open(self.LutMapFile, 'w') as f:
                f.write(LutListToTable(LutMapList).to_csv())
            FrameDict = dict()
            for frame in BIN_FrameList:
                FrameDict[frame.GetFar()] = frame
            for item in self.ProfilingResult:
                if item['Actime'] == 0:
                    FrameDict[item['BitstreamCoordinates'][0]].mask[item['BitstreamCoordinates'][1]] &= (0xFFFFFFFF ^ (1 << item['BitstreamCoordinates'][2]))
                    #FrameDict[item['BitstreamCoordinates'][0]].mask[item['BitstreamCoordinates'][1]] |= 1<<item['BitstreamCoordinates'][2]
            XilinxLutBitCnt = 0
            for frame in BIN_FrameList:
                stat = frame.get_stat()
                XilinxLutBitCnt += stat['TotalBits']
            print('Essential bits COUNT (after profiling): {0}'.format(XilinxLutBitCnt))
    #Step 3: append targets from LL file (FF and BRAM)
    FFMap = []
    BramMap = []    #[BramNode, BramBit, FAR, word, bit, data]
    LutramMap = []  #[LutramNode, Bit, FAR, word, bit, data]
    RecoveryRamLocations = []
    FAR_CLB = set()
    T = Table('Cells')
    T.build_from_csv(self.Input_CellDescFile)
    for node in self.RecoveryNodeNames:
        #print("Locations for {}".format(node))
        for i in T.query({'Node': node, 'BellType': 'RAMB'}):
            RecoveryRamLocations.append(i['CellLocation'])
    BramNodes = dict()
    for i in T.query({'BellType': 'RAMB'}):
        BramNodes[i['CellLocation']] = i['Node']
    LutRamNodes = dict()
    for i in T.query({'CellType': 'DMEM.dram'}):
        LutRamNodes[(i['CellLocation'], re.findall(r"\.([A|B|C|D]+[0-9]+)", i['BEL'])[0])] = i['Node']
    #print("Recovery Ram Locations: {}".format(str(RecoveryRamLocations)))
    self.logfile.write('Recovery RAM Location: ' + str(RecoveryRamLocations) + '\n')
    #Set mask=1 for all bits of used BRAM (from *.ll file)
    #And build FAR recovery list - include all FAR from *.ll file containing bits of selected design units (e.g. ROM inferred on BRAM)
    FARmask = dict()
    RecoveryFrames = set()
    CheckpointFrames = set()
    BinDataDict = dict()
    for i in BIN_FrameList:
        BinDataDict[i.GetFar()] = i
    with open(self.Input_LLFile, 'r') as f:
        for line in f:
            matchDesc, t = re.search(ram_search_ptn, line), 1
            if not matchDesc:
                matchDesc, t = re.search(ff_search_ptn, line), 2
            if not matchDesc:
                matchDesc, t = re.search(lutram_search_ptn, line), 3
            if matchDesc:
                FAR = int(matchDesc.group(1), 16)
                offset = int(matchDesc.group(2))
                block = matchDesc.group(3)
                if t == 1:
                    nodepath = BramNodes[block]
                elif t == 2:
                    nodepath = matchDesc.group(5)
                elif t == 3:
                    bel = '{0}{1}'.format(matchDesc.group(4), str(5) if int(matchDesc.group(5)) % 2 == 0 else str(6))
                    if (block, bel) in LutRamNodes:
                        nodepath = LutRamNodes[(block, bel)]
                    else:
                        continue
                if t == 1 and (block in RecoveryRamLocations):
                    RecoveryFrames.add(FAR)
                if nodepath.startswith(self.DutScope) or self.DutScope == '':
                    if t in [2] and nodepath.startswith(self.DutScope):
                        CheckpointFrames.add(FAR)
                    if (t == 1 and self.target_logic == 'bram') or (t == 2 and self.target_logic in ['ff', 'type0', 'ff+lutram']) or self.target_logic == 'all' or (t == 3 and self.target_logic in ['lutram', 'ff+lutram']):
                        word, bit = offset // 32, offset % 32
                        if t == 1:
                            if matchDesc.group(4) in ['BIT', 'PARBIT']:
                                BramMap.append((nodepath, int(matchDesc.group(5)), FAR, word, bit, (BinDataDict[FAR].data[word] >> bit) & 0x1))
                        if t == 2:
                            FFMap.append((nodepath, 0, FAR, word, bit, (BinDataDict[FAR].data[word] >> bit) & 0x1))
                        if t == 3:
                            LutramMap.append((nodepath, int(matchDesc.group(5)), FAR, word, bit, (BinDataDict[FAR].data[word] >> bit) & 0x1))
                        if FAR in FARmask:
                            desc = FARmask[FAR]
                        else:
                            desc = FrameDesc(FAR)
                            desc.mask = [0]*FrameSize
                            FARmask[FAR] = desc
                        desc.mask[word] |= 1 << bit
    if len(FFMap) > 0:
        Tab = Table('FFMap', ['Node', 'Case', 'FAR', 'word', 'bit', 'data'])
        for i in FFMap:
            CheckpointFrames.add(i[2])
            Tab.add_row([str(i[0]), str(i[1]), str(i[2]), str(i[3]), str(i[4]), str(i[5])])
        Tab.to_csv(';', True, os.path.join(self.targetDir, 'FFMapList.csv'))
    if len(BramMap) > 0:
        Tab = Table('BramMap', ['Node', 'Case', 'FAR', 'word', 'bit', 'data'])
        for i in BramMap:
            CheckpointFrames.add(i[2])
            Tab.add_row([str(i[0]), str(i[1]), str(i[2]), str(i[3]), str(i[4]), str(i[5])])
        Tab.to_csv(';', True, os.path.join(self.targetDir, 'BramMapList.csv'))
    if len(LutramMap) > 0:
        Tab = Table('LutramMap', ['Node', 'Case', 'FAR', 'word', 'bit', 'data'])
        for i in LutramMap:
            CheckpointFrames.add(i[2])
            Tab.add_row([str(i[0]), str(i[1]), str(i[2]), str(i[3]), str(i[4]), str(i[5])])
        with open(os.path.join(self.targetDir, 'LutramMapList.csv'), 'w') as f:
            f.write(Tab.to_csv())
    for key in sorted(FARmask):
        for i in BIN_FrameList:
            if i.GetFar() == key:
                if (not area_filter) or (area_filter and (i.Top, i.Row, i.Major) in area_filter):
                    for k in range(0, len(i.mask)):
                        i.mask[k] |= FARmask[key].mask[k]
                if self.verbosity > 2:
                    self.logfile.write("{0:08x} : {1:s}\n".format(i.GetFar(), ' '.join(['{0:08x}'.format(x) for x in i.mask])))
                break
    self.logfile.write('Recovery FAR: {}\n'.format(",".join(["{0:08x}".format(i) for i in sorted(list(RecoveryFrames))])))
    #Export the resulting descriptor
    #with open(os.path.join(self.targetDir,'BitLogBram.txt'),'w') as f:
    #    BramMap.sort()
    #    f.write('\n'.join([str(i) for i in BramMap]))
    #with open(os.path.join(self.targetDir,'BitLog.txt'),'w') as f:
    #    for i in BIN_FrameList:
    #        if all(v==0 for v in i.mask): continue
    #        else:
    #            f.write(i.to_string(2)+'\n\n')
    export_DescriptorFile(self.Output_FrameDescFile, BIN_FrameList, RecoveryFrames, CheckpointFrames)
    populationsize = 0
    for i in list(range(0, 9)):
        self.EssentialBitsPerBlockType.append(0)
    for i in BIN_FrameList:
        populationsize += i.EssentialBitsCount
        self.EssentialBitsPerBlockType[i.BlockType] += i.EssentialBitsCount
        #self.logfile.write('FAR: {0:08x} = {1:5d} Essential bits\n'.format(i.GetFar(), i.EssentialBitsCount))
    self.logfile.write('Population Size: {0:10d}\n'.format(populationsize))
    self.logfile.write('CheckpointFrames = ' + ', '.join(['{0:08x}'.format(int(x)) for x in CheckpointFrames]))
def GenerateFaultload(targetDir, DAVOS_Config, logfile, verbosity, Input_FarListFile, Input_EBCFile, Input_EBDFile, Input_BinstreamFile, CustomLutMask):
    #Note: Input_CellDescFile, Input_LLFile, RecoveryNodeNames and EssentialBitsPerBlockType
    #are expected to be defined at module/configuration level (they are not parameters here)
    Output_FrameDescFile = os.path.join(targetDir, 'FrameDescriptors.dat')
    FaultListFile = os.path.join(targetDir, 'FaultList.dat')
    LutMapFile = os.path.join(targetDir, 'LutMapList.csv')
    if os.path.exists(Output_FrameDescFile):
        return (Output_FrameDescFile, FaultListFile, LutMapFile)
    #Step 1: Build the list of frame addresses: from input file, build it if not exist (run profiler through xcst)
    FarList = LoadFarList(Input_FarListFile)
    check = dict()
    for i in FarList:
        F = FrameDesc(i)
        key = "{0:02d}_{1:02d}_{2:02d}_{3:02d}".format(F.BlockType, F.Top, F.Row, F.Major)
        if key in check:
            check[key] += 1
        else:
            check[key] = 0
    if verbosity > 1:
        for k, v in sorted(check.items(), key=lambda x: x[0]):
            logfile.write('{0:s} = {1:d}\n'.format(k, v))
    #Step 2: Build the list of frame descriptors from EBC+EBD (essential bits)
    EBC_FrameList = EBC_to_FrameList(Input_EBCFile, Input_EBDFile, FarList)
    #Step 3: Build the list of frame descriptors for complete bitstream (*.bit or *.bin)
    BIN_FrameList = parse_bitstream(Input_BinstreamFile, FarList)
    #Step 4: Compare BIN to EBC and, if no mismatches found,
    #        copy essential bits mask to BIN (all descriptors will be collected there)
    mismatches = 0
    for i in range(len(EBC_FrameList)):
        for k in range(FrameSize):
            if EBC_FrameList[i].data[k] != BIN_FrameList[i].data[k]:
                if verbosity > 0:
                    logfile.write('Check EBC vs BIT: mismatch at Frame[{0:08x}]: Block={1:5d}, Top={2:5d}, Row={3:5d}, Major={4:5d}, Minor={5:5d}\n'.format(BIN_FrameList[i].GetFar(), BIN_FrameList[i].BlockType, BIN_FrameList[i].Top, BIN_FrameList[i].Row, BIN_FrameList[i].Major, BIN_FrameList[i].Minor))
                mismatches += 1
    if mismatches == 0:
        logfile.write('\nCheck EBC vs BIT: Complete Match\n')
    else:
        logfile.write('Check EBC vs BIT: Mismatches Count = {0:d}\n'.format(mismatches))
    if mismatches == 0:
        for i in range(len(EBC_FrameList)):
            BIN_FrameList[i].mask = EBC_FrameList[i].mask
    if CustomLutMask:
        LutDescTab = Table('LutMap')
        LutDescTab.build_from_csv(os.path.join(targetDir, 'LUTMAP.csv'))
        print('Mapping LUTs to bitstream')
        LutMapList = MapLutToBitstream(LutDescTab, BIN_FrameList)
        with open(LutMapFile, 'w') as f:
            f.write(LutListToTable(LutMapList).to_csv())
        if DAVOS_Config != None and DAVOS_Config.FFIConfig.profiling:
            if not os.path.exists(FaultListFile):
                print('Profiling LUTs switching activity')
                ProfilingResult = Estimate_LUT_switching_activity(LutMapList, DAVOS_Config)
                ExportFaultList(ProfilingResult, FaultListFile)
            else:
                ProfilingResult = LoadFaultList(FaultListFile)    #load from file
            with open(LutMapFile, 'w') as f:
                f.write(LutListToTable(LutMapList).to_csv())
        with open(os.path.join(targetDir, 'BitLog.txt'), 'w') as f:
            for i in BIN_FrameList:
                if all(v == 0 for v in i.custom_mask):
                    continue
                else:
                    f.write(i.to_string(2) + '\n\n')
        for i in BIN_FrameList:
            if i.custom_mask == []:
                i.mask = [0x0]*FrameSize
            else:
                for k in range(FrameSize):
                    i.mask[k] = i.custom_mask[k]    #(i.mask[k] ^ i.custom_mask[k]) & i.mask[k]
        #raw_input('Difference with custom mask...')
    #Step 5: append descriptors for FAR items which should be recovered after injection (BRAM)
    RecoveryRamLocations = []
    FAR_CLB = set()
    T = Table('Cells')
    T.build_from_csv(Input_CellDescFile)
    for node in RecoveryNodeNames:
        #print("Locations for {}".format(node))
        for i in T.query({'Node': node, 'BellType': 'RAMB'}):
            RecoveryRamLocations.append(i['CellLocation'])
    #print("Recovery Ram Locations: {}".format(str(RecoveryRamLocations)))
    logfile.write('Recovery RAM Location: ' + str(RecoveryRamLocations) + '\n')
    #Set mask=1 for all bits of used BRAM (from *.ll file)
    #And build FAR recovery list - include all FAR from *.ll file containing bits of selected design units (e.g. ROM inferred on BRAM)
    FARmask = dict()
    RecoveryFrames = set()
    with open(Input_LLFile, 'r') as f:
        for line in f:
            matchDesc = re.search(r'([0-9abcdefABCDEF]+)\s+([0-9]+)\s+Block=([0-9a-zA-Z_]+)\s+Ram=B:(BIT|PARBIT)([0-9]+)', line, re.M)
            if matchDesc:
                FAR = int(matchDesc.group(1), 16)
                offset = int(matchDesc.group(2))
                block = matchDesc.group(3)
                if block in RecoveryRamLocations:
                    RecoveryFrames.add(FAR)
                word = offset // 32
                bit = offset % 32
                if FAR in FARmask:
                    desc = FARmask[FAR]
                else:
                    desc = FrameDesc(FAR)
                    desc.mask = [0]*FrameSize
                    FARmask[FAR] = desc
                desc.mask[word] |= 1 << bit
    for key in sorted(FARmask):
        for i in BIN_FrameList:
            if i.GetFar() == key:
                i.mask = FARmask[key].mask
                if verbosity > 2:
                    logfile.write("{0:08x} : {1:s}\n".format(i.GetFar(), ' '.join(['{0:08x}'.format(x) for x in i.mask])))
                break
    logfile.write('Recovery FAR: {}\n'.format(",".join(["{0:08x}".format(i) for i in sorted(list(RecoveryFrames))])))
    #Export the resulting descriptor
    export_DescriptorFile(Output_FrameDescFile, BIN_FrameList, RecoveryFrames)
    populationsize = 0
    for i in list(range(0, 9)):
        EssentialBitsPerBlockType.append(0)
    for i in BIN_FrameList:
        populationsize += i.EssentialBitsCount
        EssentialBitsPerBlockType[i.BlockType] += i.EssentialBitsCount
        #logfile.write('FAR: {0:08x} = {1:5d} Essential bits\n'.format(i.GetFar(), i.EssentialBitsCount))
    print("Essential bits per type: " + str(EssentialBitsPerBlockType))
    logfile.write('Population Size: {0:10d}\n'.format(populationsize))
    return (Output_FrameDescFile, FaultListFile, LutMapFile)
def build_FFI_report(DavosConfig, ExportLutCsv=False):
    datamodel = DataModel()
    if not os.path.exists(DavosConfig.report_dir):
        os.makedirs(DavosConfig.report_dir)
    datamodel.ConnectDatabase(DavosConfig.get_DBfilepath(False), DavosConfig.get_DBfilepath(True))
    datamodel.RestoreHDLModels(DavosConfig.parconf)
    datamodel.RestoreEntity(DataDescriptors.InjTarget)
    datamodel.SaveHdlModels()
    for conf in DavosConfig.parconf:
        model = datamodel.GetHdlModel(conf.label)
        Tab = Table('LutMapList')
        Tab.build_from_csv(os.path.join(conf.work_dir, 'LutMapList.csv'))
        LutMapList = TableToLutList(Tab)
        if ExportLutCsv:
            AggregateLutResults(LutMapList, os.path.join(conf.work_dir, './log/Injector.log'))
            with open(os.path.join(conf.work_dir, 'LutResult.csv'), 'w') as f:
                zTab = LutListToTable(LutMapList, True, False)
                f.write(zTab.to_csv())
        if DavosConfig.FFIConfig.profiling:
            ExportProfilingStatistics(LutMapList, os.path.join(conf.work_dir, 'PerFrame.csv'))
        SummaryTable = Table(model.Label, ['ID', 'Target', 'FAR', 'word', 'bit', 'Actime', 'FailureMode'])
        lut_dict = build_lut_coord_dict(LutMapList, '')
        ff_dict = load_Map_dict(os.path.join(conf.work_dir, 'FFMapList.csv'))
        ram_dict = load_Map_dict(os.path.join(conf.work_dir, 'BramMapList.csv'))
        lutram_dict = load_Map_dict(os.path.join(conf.work_dir, 'LutramMapList.csv'))
        #ff_dict, ram_dict = build_regmem_coord_dict(os.path.join(conf.work_dir, 'Bitstream.ll'), os.path.join(conf.work_dir, 'Bels.csv'))
        ExpDescIdCnt = datamodel.GetMaxKey(DataDescriptors.InjectionExp) + 1
        with open(os.path.join(conf.work_dir, './log/Injector.log'), 'rU') as f:
            content = f.readlines()
        for l in content:
            match = re.search(injlog_item_ptn, l)
            if match:
                coord = (int(match.group(2)), int(match.group(3)), int(match.group(4)))
                if coord in lut_dict:
                    target = datamodel.GetOrAppendTarget(
                        lut_dict[coord][0][0]['name'], 'LUT',
                        '{}/{}'.format(lut_dict[coord][0][1], lut_dict[coord][0][2]))
                elif coord in ff_dict:
                    target = datamodel.GetOrAppendTarget(ff_dict[coord]['node'], 'FF', ff_dict[coord]['case'])
                elif coord in ram_dict:
                    target = datamodel.GetOrAppendTarget(ram_dict[coord]['node'], 'BRAM', ram_dict[coord]['case'])
                elif coord in lutram_dict:
                    target = datamodel.GetOrAppendTarget(lutram_dict[coord]['node'], 'LUTRAM', lutram_dict[coord]['case'])
                else:
                    target = datamodel.GetOrAppendTarget(
                        'U:{0:08x}:{1:03d}:{2:02d}'.format(int(match.group(2)), int(match.group(3)), int(match.group(4))),
                        'TYPE0', '')
                InjDesc = InjectionDescriptor()
                InjDesc.InjectionTime = float(match.group(5))
                #Map the textual outcome onto a single-letter failure mode:
                #M = masked, L = latent, C = SDC, S = signaled, X = unknown
                fmode = match.group(6).lower()
                if fmode.find('masked') >= 0:
                    InjDesc.FailureMode = 'M'
                elif fmode.find('latent') >= 0:
                    InjDesc.FailureMode = 'L'
                elif fmode.find('sdc') >= 0:
                    InjDesc.FailureMode = 'C'
                elif fmode.find('signaled') >= 0:
                    InjDesc.FailureMode = 'S'
                else:
                    InjDesc.FailureMode = 'X'
                InjDesc.ID = ExpDescIdCnt
                InjDesc.ModelID = model.ID
                InjDesc.TargetID = target.ID
                InjDesc.FaultModel = 'BitFlip'
                InjDesc.ForcedValue = ''
                InjDesc.InjectionDuration = float(0)
                InjDesc.ObservationTime = float(0)
                InjDesc.Node = target.NodeFullPath
                InjDesc.InjCase = target.InjectionCase
                InjDesc.Status = 'F'
                InjDesc.FaultToFailureLatency = float(0)
                InjDesc.ErrorCount = 0
                InjDesc.Dumpfile = ''
                datamodel.LaunchedInjExp_dict[InjDesc.ID] = InjDesc
                actime = float(-1.0) if match.group(8) == None else float(match.group(8))
                SummaryTable.add_row(list(map(str, [int(match.group(1)), target.NodeFullPath, coord[0], coord[1], coord[2], actime, InjDesc.FailureMode])))
                ExpDescIdCnt += 1
                if ExpDescIdCnt % 100 == 0:
                    sys.stdout.write('Processed report lines: {0}\r'.format(str(ExpDescIdCnt)))
        SummaryTable.to_csv(';', True, os.path.join(DavosConfig.report_dir, '{0}.csv'.format(model.Label)))
        T = SummaryTable.to_html_table('SEU_LUT_Details')
        T.to_file(os.path.join(DavosConfig.report_dir, '{0}.html'.format(model.Label)))
    datamodel.SaveHdlModels()
    datamodel.SaveTargets()
    datamodel.SaveInjections()
    build_report(DavosConfig, DavosConfig.toolconf, datamodel)
    datamodel.SyncAndDisconnectDB()
def load_fault_list_csv(self, infile):
    Fdesctab = Table('Fdesc')
    Fdesctab.build_from_csv(infile)
    fdesc, idx, i, MaxRows = None, -1, 0, Fdesctab.rownum()
    while i < MaxRows:
        fdesc = FaultDescriptor(
            int(Fdesctab.getByLabel('Id', i)),
            int(Fdesctab.getByLabel('CellType', i)),
            int(Fdesctab.getByLabel('Multiplicity', i)))
        fdesc.PartIdx = int(Fdesctab.getByLabel('PartIdx', i))
        fdesc.FailureMode = Fdesctab.getByLabel('FailureMode', i)
        for seu_idx in range(fdesc.Multiplicity):
            seu = SEU_item()
            seu.Offset = int(Fdesctab.getByLabel('Offset', i))
            seu.DesignNode = Fdesctab.getByLabel('DesignNode', i)
            seu.SLR = int(Fdesctab.getByLabel('SLR', i), 16)
            seu.FAR = int(Fdesctab.getByLabel('FAR', i), 16)
            seu.Word = int(Fdesctab.getByLabel('Word', i))
            seu.Bit = int(Fdesctab.getByLabel('Bit', i))
            seu.Mask = int(Fdesctab.getByLabel('Mask', i), 16)
            seu.Time = int(Fdesctab.getByLabel('Time', i))
            fdesc.SeuItems.append(seu)
            i += 1
        self.fault_list.append(fdesc)
    print('Fault descriptors restored from {0:s} : {1:d} items'.format(infile, len(self.fault_list)))
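

# Expected 'Faultlist.csv' layout (reconstructed from the getByLabel() calls above; the
# actual column order comes from get_fdesc_labels() and is an assumption here). The
# separator is ';', as written by export_fault_list_csv():
#   Id;CellType;Multiplicity;PartIdx;FailureMode;Offset;DesignNode;SLR;FAR;Word;Bit;Mask;Time
# SLR, FAR and Mask are parsed as hexadecimal; the remaining numeric fields as decimal.
# Each fault descriptor spans 'Multiplicity' consecutive rows, one row per SEU item.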