def calculateTransferRates(pCtx):
    print("%sStart: Transfer rate calculations..." % (os.linesep))

    if "jobIds" in pCtx["ElapsedTimeData"]:
        for l_JobId in pCtx["ElapsedTimeData"]["jobIds"]:
            if "jobStepIds" in pCtx["ElapsedTimeData"]["jobIds"][l_JobId]:
                for l_JobStepId in pCtx["ElapsedTimeData"]["jobIds"][l_JobId]["jobStepIds"]:
                    l_JobIdEntry = pCtx["ElapsedTimeData"]["jobIds"][l_JobId]["jobStepIds"][l_JobStepId]
                    l_ElapsedTime = float(
                        cmn.calculateTimeDifferenceInSeconds(
                            l_JobIdEntry["EndDateTime"], l_JobIdEntry["StartDateTime"]))
                    l_JobIdEntry["ElapsedTime (secs)"] = l_ElapsedTime
                    if l_ElapsedTime:
                        l_JobIdEntry["JobId/JobStepId TransferRate (GB/sec)"] = round(
                            (float(l_JobIdEntry["SizeTransferred"]) / float(l_ElapsedTime)) /
                            float(10**9), 6)
                    else:
                        l_JobIdEntry["JobId/JobStepId TransferRate (GB/sec)"] = None
                    l_JobIdEntry["NumberOfConnections"] = cmn.numericFormat(
                        l_JobIdEntry["NumberOfConnections"])
                    l_JobIdEntry["NumberOfContribIds"] = cmn.numericFormat(
                        l_JobIdEntry["NumberOfContribIds"])
                    l_JobIdEntry["SizeTransferred"] = cmn.numericFormat(
                        l_JobIdEntry["SizeTransferred"])

                    if "servers" in l_JobIdEntry:
                        for l_Server in l_JobIdEntry["servers"]:
                            l_ServerEntry = l_JobIdEntry["servers"][l_Server]
                            l_ElapsedTime = float(
                                cmn.calculateTimeDifferenceInSeconds(
                                    l_ServerEntry["EndDateTime"], l_ServerEntry["StartDateTime"]))
                            l_ServerEntry["ElapsedTime (secs)"] = l_ElapsedTime
                            if l_ElapsedTime:
                                l_ServerEntry["Server TransferRate (GB/sec)"] = round(
                                    (float(l_ServerEntry["SizeTransferred"]) /
                                     float(l_ElapsedTime)) / float(10**9), 6)
                            else:
                                l_ServerEntry["Server TransferRate (GB/sec)"] = None
                            l_ServerEntry["NumberOfContribIds"] = cmn.numericFormat(
                                l_ServerEntry["NumberOfContribIds"])
                            l_ServerEntry["SizeTransferred"] = cmn.numericFormat(
                                l_ServerEntry["SizeTransferred"])

                            if "connections" in l_ServerEntry:
                                for l_Connection in l_ServerEntry["connections"]:
                                    l_ConnectionEntry = l_ServerEntry["connections"][l_Connection]
                                    l_ElapsedTime = float(
                                        cmn.calculateTimeDifferenceInSeconds(
                                            l_ConnectionEntry["EndDateTime"],
                                            l_ConnectionEntry["StartDateTime"]))
                                    l_ConnectionEntry["ElapsedTime (secs)"] = l_ElapsedTime
                                    if l_ElapsedTime:
                                        l_ConnectionEntry["Connection TransferRate (GB/sec)"] = round(
                                            (float(l_ConnectionEntry["SizeTransferred"]) /
                                             float(l_ElapsedTime)) / float(10**9), 6)
                                    else:
                                        l_ConnectionEntry["Connection TransferRate (GB/sec)"] = None
                                    l_ConnectionEntry["NumberOfContribIds"] = cmn.numericFormat(
                                        l_ConnectionEntry["NumberOfContribIds"])
                                    l_ConnectionEntry["SizeTransferred"] = cmn.numericFormat(
                                        l_ConnectionEntry["SizeTransferred"])

    # Print the results to a file
    l_PathFileName = os.path.join(pCtx["ROOTDIR"], "Analysis", "TransferRates.txt")
    cmn.printFormattedFile(pCtx, l_PathFileName, pCtx["ElapsedTimeData"])
    l_PathFileName = os.path.join(pCtx["ROOTDIR"], "Analysis", "TransferRates.json")
    cmn.printFormattedFileAsJson(pCtx, l_PathFileName, pCtx["ElapsedTimeData"])

    print("%s End: Transfer rate calculations..." % (os.linesep))

    return
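
# Illustrative sketch (not part of the tool): the rates computed above are simply bytes
# transferred divided by elapsed seconds, scaled to GB/sec.  For example, a hypothetical
# entry that moved 3.0e9 bytes in 2.5 seconds would report:
#
#     round((float(3.0e9) / float(2.5)) / float(10**9), 6)   # -> 1.2 (GB/sec)
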
def performBasicAnalysis(pCtx):
    print("%sStart: Perform basic analysis..." % (os.linesep))

    l_TestOutput = []
    l_Data = pCtx["ServerData"]
    l_StageInData = pCtx["StageInData"]["jobIds"]
    l_StageOutData = pCtx["StageOutData"]["jobIds"]

    # Determine list of servers
    l_TestOutput.append("List of Servers%s" % (os.linesep))
    l_Servers = cmn.getServers(l_Data)
    l_TestOutput.append("%d server(s), %s%s" % (len(l_Servers), repr(l_Servers), os.linesep))
    l_TestOutput.append("%s" % (os.linesep))

    # Determine list of jobids
    l_TestOutput.append("List of JobIds%s" % (os.linesep))
    l_JobIds = cmn.getJobIds(l_Data)
    l_TestOutput.append("%d jobid(s), %s%s" % (len(l_JobIds), repr(l_JobIds), os.linesep))
    l_TestOutput.append("%s" % (os.linesep))

    # Determine jobids per server
    l_TestOutput.append("List of JobIds per Server%s" % (os.linesep))
    for l_Server in l_Servers:
        l_JobIds = cmn.getJobIdsForServer(l_Data, l_Server)
        l_TestOutput.append("For server %s, %d jobid(s), %s%s" %
                            (l_Server, len(l_JobIds), repr(l_JobIds), os.linesep))
    l_TestOutput.append("%s" % (os.linesep))

    # Determine servers per jobid
    l_TestOutput.append("List of Servers per JobId%s" % (os.linesep))
    l_JobIds = cmn.getJobIds(l_Data)
    for l_JobId in l_JobIds:
        l_Servers = cmn.getServersForJobid(l_Data, l_JobId)
        l_TestOutput.append("For jobid %d, %d server(s), %s%s" %
                            (l_JobId, len(l_Servers), repr(l_Servers), os.linesep))
    l_TestOutput.append("%s" % (os.linesep))

    # Determine handles per jobid, per server, per jobstepid, per connection
    l_TestOutput.append("List of Handles per JobId, per Server, per JobStepId, per Connection%s" %
                        (os.linesep))
    for l_JobId in l_JobIds:
        l_Servers = cmn.getServersForJobid(l_Data, l_JobId)
        l_TestOutput.append("%sJobId %d, %d server(s)%s" %
                            (2 * " ", l_JobId, len(l_Servers), os.linesep))
        for l_Server in l_Servers:
            l_JobStepIds = cmn.getJobStepIdsForServerJobId(l_Data, l_Server, l_JobId)
            l_TestOutput.append("%sJobId %d, server %s, %d jobstep(s)%s" %
                                (4 * " ", l_JobId, l_Server, len(l_JobStepIds), os.linesep))
            for l_JobStepId in l_JobStepIds:
                l_ConnectionHandleData = cmn.getHandlesPerConnectionForJobIdJobStepId(
                    l_Data, l_Server, l_JobId, l_JobStepId)
                l_ConnectionHandles = sorted(l_ConnectionHandleData.keys())
                l_TestOutput.append("%sJobStepId %d, %d connection(s)%s" %
                                    (6 * " ", l_JobStepId, len(l_ConnectionHandles), os.linesep))
                for l_Connection in l_ConnectionHandles:
                    l_Handles = l_ConnectionHandleData[l_Connection]
                    l_Handles.sort()
                    l_TestOutput.append("%sConnection %s. %d handle(s), %s%s" %
                                        (8 * " ", l_Connection, len(l_Handles), repr(l_Handles),
                                         os.linesep))
    l_TestOutput.append("%s" % (os.linesep))

    # Print out stagein and stageout logs for the found jobids
    l_TestOutput.append("StageIn and StageOut Log Data for Found Jobs%s" % (os.linesep))
    for l_JobId in l_JobIds:
        l_TestOutput.append("%sJobId %d%s" % (2 * " ", l_JobId, os.linesep))

        l_TestOutput.append("%sStageIn Log Data%s" % (4 * " ", os.linesep))
        l_OutputGenerated = False
        if l_JobId in l_StageInData:
            for l_Line in l_StageInData[l_JobId]:
                l_TestOutput.append("%s%s%s" % (6 * " ", l_Line, os.linesep))
                l_OutputGenerated = True
        if not l_OutputGenerated:
            l_TestOutput.append("%sNo stagein data found%s" % (6 * " ", os.linesep))
        l_TestOutput.append("%s" % (os.linesep))

        l_TestOutput.append("%sStageOut Log Data%s" % (4 * " ", os.linesep))
        l_OutputGenerated = False
        if l_JobId in l_StageOutData:
            for l_Line in l_StageOutData[l_JobId]:
                l_TestOutput.append("%s%s%s" % (6 * " ", l_Line, os.linesep))
                l_OutputGenerated = True
        if not l_OutputGenerated:
            l_TestOutput.append("%sNo stageout data found%s" % (6 * " ", os.linesep))
        l_TestOutput.append("%s" % (os.linesep))

    # Write out the basic results
    l_PathFileName = os.path.join(pCtx["ROOTDIR"], "Analysis", "BasicData.txt")
    cmn.writeOutput(pCtx, l_PathFileName, l_TestOutput)
    print("Basic results written to %s%s" % (l_PathFileName, os.linesep))

    # Start detailed analysis per jobid/jobstepid, per server
    #
    # For each jobid...
    l_JobIds = cmn.getJobIds(l_Data)
    for l_JobId in l_JobIds:
        l_Output = {}
        l_Output[l_JobId] = {}
        l_NumberOfConnectionsForAllServers = 0
        l_NumberOfHandlesForAllServers = 0

        # For each server...
        l_Servers = cmn.getServersForJobid(l_Data, l_JobId)
        l_NumberOfServers = len(l_Servers)
        for l_Server in l_Servers:
            l_Output[l_JobId][l_Server] = {}
            l_NumberOfConnectionsForServer = 0
            l_NumberOfHandlesForServer = 0

            # For each jobstepid...
            l_JobStepIds = cmn.getJobStepIdsForServerJobId(l_Data, l_Server, l_JobId)
            for l_JobStepId in l_JobStepIds:
                l_Output[l_JobId][l_Server][l_JobStepId] = {
                    "handles": {
                        "connections": {},
                        "NumberOfConnections": 0,
                        "SizeTransferred": 0,
                        "ProcessingTimes (ContribId Min/Max)": [None, None],
                    },
                    "NotSuccessfulHandles": [],
                }
                l_JobStepEntry = l_Output[l_JobId][l_Server][l_JobStepId]

                # For each handle...
                l_Handles = cmn.getHandlesForServer(l_Data, l_Server)
                l_NumberOfHandles = 0
                l_HandleProcessingTimes = copy.deepcopy(
                    l_JobStepEntry["handles"]["ProcessingTimes (ContribId Min/Max)"])
                l_NotSuccessfulHandles = []
                for l_Handle in l_Handles:
                    l_HandleData = l_Data[l_Server]["Handles"][l_Handle]
                    if (l_HandleData["JobId"] == l_JobId and
                            l_HandleData["JobStepId"] == l_JobStepId):
                        # JobId and JobStepId match...
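                        # Handles whose "Status" is not BBFULLSUCCESS, or that carry no
                        # "Status" at all (treated as NOT_COMPLETED), are collected in
                        # l_NotSuccessfulHandles and reported per jobid/jobstepid below.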
                        l_NumberOfHandles = l_NumberOfHandles + 1
                        l_NumberOfHandlesForServer = l_NumberOfHandlesForServer + 1
                        l_NumberOfHandlesForAllServers = l_NumberOfHandlesForAllServers + 1
                        if "Status" in l_HandleData:
                            if l_HandleData["Status"] != "BBFULLSUCCESS":
                                l_NotSuccessfulHandles.append((l_Handle, l_HandleData["Status"]))
                        else:
                            l_NotSuccessfulHandles.append((l_Handle, "NOT_COMPLETED"))

                        l_NumberOfConnections = l_JobStepEntry["handles"]["NumberOfConnections"]
                        l_SizeTransferred = l_JobStepEntry["handles"]["SizeTransferred"]

                        # For each connection...
                        l_Connections = l_HandleData["Connections"]
                        for l_Connection in l_Connections:
                            if l_Connection not in l_JobStepEntry["handles"]["connections"]:
                                l_NumberOfConnections = l_NumberOfConnections + 1
                                l_NumberOfConnectionsForServer = l_NumberOfConnectionsForServer + 1
                                l_NumberOfConnectionsForAllServers = l_NumberOfConnectionsForAllServers + 1
                                l_JobStepEntry["handles"]["connections"][l_Connection] = {
                                    "contribIds": {
                                        "processingTimes (File Min/Max)": [None, None],
                                        "readTimes (File Min/Max)": [None, None],
                                        "writeTimes (File Min/Max)": [None, None],
                                        "syncTimes (File Min/Max)": [None, None],
                                        "transferTypes": [],
                                        "NumberOfContribIds": 0,
                                        "TotalNumberOfFiles": 0,
                                    },
                                }

                            # For each LVUuid...
                            l_ContribIdsEntry = l_JobStepEntry["handles"]["connections"][
                                l_Connection]["contribIds"]
                            l_NumberOfContribIds = l_ContribIdsEntry["NumberOfContribIds"]
                            l_ProcessingTimes = copy.deepcopy(
                                l_ContribIdsEntry["processingTimes (File Min/Max)"])
                            l_TotalNumberOfFiles = l_ContribIdsEntry["TotalNumberOfFiles"]
                            l_LVUuids = l_Connections[l_Connection]["LVUuids"]
                            for l_LVUuid in l_LVUuids:
                                # For each ContribId...
                                l_NotSuccessfulContribIds = []
                                l_ContribIds = l_LVUuids[l_LVUuid]["ContribIds"]
                                for l_ContribId in l_ContribIds:
                                    l_ContribIdData = l_ContribIds[l_ContribId]
                                    l_NumberOfContribIds = l_NumberOfContribIds + 1
                                    if "Status" in l_ContribIdData:
                                        if l_ContribIdData["Status"] != "BBFULLSUCCESS":
                                            l_NotSuccessfulContribIds.append(
                                                (l_ContribId, l_ContribIdData["Status"]))
                                    else:
                                        l_NotSuccessfulContribIds.append(
                                            (l_ContribId, "NOT_COMPLETED"))

                                    if "SizeTransferred" in l_ContribIdData:
                                        l_SizeTransferred += l_ContribIdData["SizeTransferred"]

                                    if "ProcessingTime" in l_ContribIdData:
                                        l_ProcessingTime = (l_ContribId,
                                                            l_ContribIdData["ProcessingTime"])
                                        if l_ProcessingTimes == [None, None]:
                                            l_ProcessingTimes = [l_ProcessingTime, l_ProcessingTime]
                                        else:
                                            if l_ProcessingTime[1] < l_ProcessingTimes[0][1]:
                                                l_ProcessingTimes[0] = l_ProcessingTime
                                            if l_ProcessingTime[1] > l_ProcessingTimes[1][1]:
                                                l_ProcessingTimes[1] = l_ProcessingTime
                                        if l_HandleProcessingTimes == [None, None]:
                                            l_HandleProcessingTimes = l_ProcessingTimes
                                        else:
                                            if l_ProcessingTimes[0][1] < l_HandleProcessingTimes[0][1]:
                                                l_HandleProcessingTimes[0] = l_ProcessingTimes[0]
                                            if l_ProcessingTimes[1][1] > l_HandleProcessingTimes[1][1]:
                                                l_HandleProcessingTimes[1] = l_ProcessingTimes[1]

                                    l_ReadTimes = l_ContribIdsEntry["readTimes (File Min/Max)"]
                                    l_WriteTimes = l_ContribIdsEntry["writeTimes (File Min/Max)"]
                                    l_SyncTimes = l_ContribIdsEntry["syncTimes (File Min/Max)"]
                                    l_TransferTypes = set(l_ContribIdsEntry["transferTypes"])
                                    if "Files" in l_ContribIdData:
                                        l_Files = l_ContribIdData["Files"]
                                        # For each file...
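                                        # Each min/max entry below is tracked as a
                                        # (contribid, file, (count, time)) tuple so the
                                        # report can show which file produced the extreme.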
                                        for l_File in l_Files:
                                            l_FileData = l_Files[l_File]
                                            l_TotalNumberOfFiles += 1
                                            if "ReadCount" in l_FileData:
                                                l_ReadTime = (l_ContribId, l_File,
                                                              (l_FileData["ReadCount"],
                                                               l_FileData["ReadTime"]))
                                                if l_ReadTimes == [None, None]:
                                                    l_ReadTimes = [l_ReadTime, l_ReadTime]
                                                else:
                                                    if l_ReadTime[2][1] < l_ReadTimes[0][2][1]:
                                                        l_ReadTimes[0] = l_ReadTime
                                                    if l_ReadTime[2][1] > l_ReadTimes[1][2][1]:
                                                        l_ReadTimes[1] = l_ReadTime

                                                l_WriteTime = (l_ContribId, l_File,
                                                               (l_FileData["WriteCount"],
                                                                l_FileData["WriteTime"]))
                                                if l_WriteTimes == [None, None]:
                                                    l_WriteTimes = [l_WriteTime, l_WriteTime]
                                                else:
                                                    if l_WriteTime[2][1] < l_WriteTimes[0][2][1]:
                                                        l_WriteTimes[0] = l_WriteTime
                                                    if l_WriteTime[2][1] > l_WriteTimes[1][2][1]:
                                                        l_WriteTimes[1] = l_WriteTime

                                                l_TransferTypes.add(l_FileData["TransferType"])

                                                # NOTE: The output will associate the sync
                                                #       count/time with the source file, when in
                                                #       fact that data is for the corresponding
                                                #       target files.
                                                if l_FileData["SyncCount"] is not None:
                                                    l_SyncTime = (l_ContribId, l_File,
                                                                  (l_FileData["SyncCount"],
                                                                   l_FileData["SyncTime"]))
                                                    if l_SyncTimes == [None, None]:
                                                        l_SyncTimes = [l_SyncTime, l_SyncTime]
                                                    else:
                                                        if l_SyncTime[2][1] < l_SyncTimes[0][2][1]:
                                                            l_SyncTimes[0] = l_SyncTime
                                                        if l_SyncTime[2][1] > l_SyncTimes[1][2][1]:
                                                            l_SyncTimes[1] = l_SyncTime

                                    # End of files...
                                    l_ContribIdsEntry["readTimes (File Min/Max)"] = l_ReadTimes
                                    l_ContribIdsEntry["writeTimes (File Min/Max)"] = l_WriteTimes
                                    l_ContribIdsEntry["syncTimes (File Min/Max)"] = l_SyncTimes
                                    l_ContribIdsEntry["transferTypes"] = list(l_TransferTypes)
                                # End of contribids...
                                if l_NotSuccessfulContribIds:
                                    if "NotSuccessfulContribIds" not in l_ContribIdsEntry:
                                        l_ContribIdsEntry["NotSuccessfulContribIds"] = []
                                    l_ContribIdsEntry["NotSuccessfulContribIds"] = \
                                        l_ContribIdsEntry["NotSuccessfulContribIds"] + \
                                        l_NotSuccessfulContribIds
                            # End of LVUuids...
                            l_ContribIdsEntry["NumberOfContribIds"] = l_NumberOfContribIds
                            l_ContribIdsEntry["processingTimes (File Min/Max)"] = copy.deepcopy(
                                l_ProcessingTimes)
                            l_ContribIdsEntry["TotalNumberOfFiles"] = l_TotalNumberOfFiles
                        # End of connections...
                        l_JobStepEntry["handles"]["NumberOfConnections"] = l_NumberOfConnections
                        if l_SizeTransferred:
                            l_JobStepEntry["handles"]["SizeTransferred"] = l_SizeTransferred
                # End of handles...
                if l_NotSuccessfulHandles:
                    l_JobStepEntry["NotSuccessfulHandles"] = \
                        l_JobStepEntry["NotSuccessfulHandles"] + l_NotSuccessfulHandles
                l_JobStepEntry["handles"]["NumberOfHandles"] = l_NumberOfHandles
                l_JobStepEntry["handles"]["ProcessingTimes (ContribId Min/Max)"] = copy.deepcopy(
                    l_HandleProcessingTimes)
            # End of jobstepids...
            l_Output[l_JobId][l_Server]["NumberOfConnectionsForServer"] = l_NumberOfConnectionsForServer
            l_Output[l_JobId][l_Server]["NumberOfHandlesForServer"] = l_NumberOfHandlesForServer
            l_Output[l_JobId][l_Server]["NumberOfJobSteps"] = len(l_JobStepIds)
        # End of servers...
        l_Output[l_JobId]["NumberOfServers"] = l_NumberOfServers
        l_Output[l_JobId]["NumberOfConnectionsForAllServers"] = l_NumberOfConnectionsForAllServers
        l_Output[l_JobId]["NumberOfHandlesForAllServers"] = l_NumberOfHandlesForAllServers

        # Format data/add average size transferred per handle data
        for l_JobId in l_Output:
            for l_Server in l_Output[l_JobId]:
                if type(l_Output[l_JobId][l_Server]) == dict:
                    for l_JobStepId in l_Output[l_JobId][l_Server]:
                        # NOTE: Not every l_Server element is a server name.
                        #       We only want to process those entries with a "handles" key;
                        #       the try/except skips the rest.
                        try:
                            l_HandlesEntry = l_Output[l_JobId][l_Server][l_JobStepId]["handles"]
                            if "SizeTransferred" in l_HandlesEntry:
                                if l_HandlesEntry["SizeTransferred"]:
                                    l_HandlesEntry["Avg SizeTransferred/Handle"] = cmn.numericFormat(
                                        round(float(l_HandlesEntry["SizeTransferred"]) /
                                              float(l_HandlesEntry["NumberOfHandles"]), 3))
                                    l_HandlesEntry["SizeTransferred"] = cmn.numericFormat(
                                        l_HandlesEntry["SizeTransferred"])
                        except Exception:
                            pass

        '''
        # \todo - Not sure we need the following commented out section...
        #         It doesn't work as currently written...  @DLH
        # For each jobstep, calculate the min/max processing times for all contribids,
        # across all servers
        for l_JobId in l_Output:
            for l_Server in l_Output[l_JobId]:
                if type(l_Output[l_JobId][l_Server]) == dict:
                    for l_JobStepId in l_Output[l_JobId][l_Server]:
                        if type(l_Output[l_JobId][l_Server][l_JobStepId]) == dict:
                            l_JobStepEntry = l_Output[l_JobId][l_Server][l_JobStepId]
                            if "ProcessingTimes (All Servers ContribId Min/Max)" not in l_JobStepEntry:
                                l_ServerProcessingTimes = copy.deepcopy(
                                    l_JobStepEntry["handles"]["ProcessingTimes (ContribId Min/Max)"])
                            else:
                                l_ServerProcessingTimes = copy.deepcopy(
                                    l_JobStepEntry["ProcessingTimes (All Servers ContribId Min/Max)"])
                            if (l_JobStepEntry["handles"]["ProcessingTimes (ContribId Min/Max)"][0][1] <
                                    l_ServerProcessingTimes[0][1]):
                                l_ServerProcessingTimes[0] = \
                                    l_JobStepEntry["handles"]["ProcessingTimes (ContribId Min/Max)"][0]
                            if (l_JobStepEntry["handles"]["ProcessingTimes (ContribId Min/Max)"][1][1] >
                                    l_ServerProcessingTimes[1][1]):
                                l_ServerProcessingTimes[1] = \
                                    l_JobStepEntry["handles"]["ProcessingTimes (ContribId Min/Max)"][1]
                            l_JobStepEntry["ProcessingTimes (All Servers ContribId Min/Max)"] = \
                                copy.deepcopy(l_ServerProcessingTimes)
        '''

        # Output the results
        # pprint.pprint(l_Output)
        cmn.ensure(os.path.join(pCtx["ROOTDIR"], "Analysis", str(l_JobId)))
        l_PathFileName = os.path.join(pCtx["ROOTDIR"], "Analysis", str(l_JobId), "Details.txt")
        cmn.printFormattedFile(pCtx, l_PathFileName, l_Output)
        l_PathFileName = os.path.join(pCtx["ROOTDIR"], "Analysis", str(l_JobId), "Details.json")
        cmn.printFormattedFileAsJson(pCtx, l_PathFileName, l_Output)
        print("Jobid details written to %s%s" %
              (os.path.join(pCtx["ROOTDIR"], "Analysis", str(l_JobId)), os.linesep))

    print(" End: Perform basic analysis...")

    return
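
# Minimal usage sketch (an assumption, not part of the tool): performBasicAnalysis()
# expects a context dictionary that the earlier collection steps have populated with
# "ROOTDIR", "ServerData", "StageInData", and "StageOutData", e.g.:
#
#     l_Ctx = {"ROOTDIR": "/tmp/bbanalysis",       # hypothetical output directory
#              "ServerData": {},                   # per-server handle data
#              "StageInData": {"jobIds": {}},
#              "StageOutData": {"jobIds": {}}}
#     performBasicAnalysis(l_Ctx)
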
def performBSCFS_Analysis(pCtx):
    print("%sStart: BSCFS transfer rate calculations..." % (os.linesep))

    if "BSCFS" in pCtx:
        for l_JobId in pCtx["BSCFS"]:
            for l_JobStepId in pCtx["BSCFS"][l_JobId]:
                for l_Server in pCtx["BSCFS"][l_JobId][l_JobStepId]:
                    for l_Connection in pCtx["BSCFS"][l_JobId][l_JobStepId][l_Server]:
                        l_Order = 1
                        l_Handles = pCtx["BSCFS"][l_JobId][l_JobStepId][l_Server][l_Connection]
                        l_AllDone = False
                        while not l_AllDone:
                            l_StartTime = [None, None]
                            for l_Handle in l_Handles.keys():
                                if l_Handles[l_Handle][0] == 0:
                                    if l_StartTime[1] is not None:
                                        if cmn.compareTimes(l_StartTime,
                                                            (l_Handle, l_Handles[l_Handle][3])) == 1:
                                            l_StartTime = [l_Handle, l_Handles[l_Handle][3]]
                                    else:
                                        l_StartTime = [l_Handle, l_Handles[l_Handle][3]]
                            if l_StartTime[1] is not None:
                                l_Handles[l_StartTime[0]][0] = l_Order
                                l_Order += 1
                            else:
                                l_AllDone = True

                pCtx["BSCFS_TransferRates"] = {}
                pCtx["BSCFS_TransferRates"][0] = ("BSCFS Iteration:", "(StartTime", "EndTime",
                                                  "TotalTransferSize", "TransferRate (GB/sec))")
                l_Order = 1
                l_AllDone = False
                while not l_AllDone:
                    l_AllDone = True
                    pCtx["BSCFS_TransferRates"][l_Order] = [None, None, 0, 0.0]
                    for l_Server in pCtx["BSCFS"][l_JobId][l_JobStepId]:
                        for l_Connection in pCtx["BSCFS"][l_JobId][l_JobStepId][l_Server]:
                            l_Handles = pCtx["BSCFS"][l_JobId][l_JobStepId][l_Server][l_Connection]
                            for l_Handle in l_Handles.keys():
                                if l_Handles[l_Handle][0] == l_Order:
                                    if pCtx["BSCFS_TransferRates"][l_Order][0] is not None:
                                        if cmn.compareTimes(
                                                (None, pCtx["BSCFS_TransferRates"][l_Order][0]),
                                                (None, l_Handles[l_Handle][3])) == 1:
                                            pCtx["BSCFS_TransferRates"][l_Order][0] = l_Handles[l_Handle][3]
                                    else:
                                        pCtx["BSCFS_TransferRates"][l_Order][0] = l_Handles[l_Handle][3]
                                    if pCtx["BSCFS_TransferRates"][l_Order][1] is not None:
                                        if cmn.compareTimes(
                                                (None, pCtx["BSCFS_TransferRates"][l_Order][1]),
                                                (None, l_Handles[l_Handle][4])) == -1:
                                            pCtx["BSCFS_TransferRates"][l_Order][1] = l_Handles[l_Handle][4]
                                    else:
                                        pCtx["BSCFS_TransferRates"][l_Order][1] = l_Handles[l_Handle][4]
                                    pCtx["BSCFS_TransferRates"][l_Order][2] += l_Handles[l_Handle][5]
                                    l_AllDone = False
                                    break
                    if not l_AllDone:
                        l_ElapsedTime = float(
                            cmn.calculateTimeDifferenceInSeconds(
                                (None, pCtx["BSCFS_TransferRates"][l_Order][1]),
                                (None, pCtx["BSCFS_TransferRates"][l_Order][0])))
                        pCtx["BSCFS_TransferRates"][l_Order][3] = round(
                            (float(pCtx["BSCFS_TransferRates"][l_Order][2]) /
                             float(l_ElapsedTime)) / float(10**9), 6)
                    l_Order += 1

    # Remove the last, as that has no data
    if "BSCFS_TransferRates" in pCtx:
        pCtx["BSCFS_TransferRates"].pop(max(pCtx["BSCFS_TransferRates"].keys()), None)
        if len(pCtx["BSCFS_TransferRates"].keys()) > 0:
            # Print the results to a file
            l_PathFileName = os.path.join(pCtx["ROOTDIR"], "Analysis", "BSCFS_TransferRates.txt")
            cmn.printFormattedFile(pCtx, l_PathFileName, pCtx["BSCFS_TransferRates"])
            l_PathFileName = os.path.join(pCtx["ROOTDIR"], "Analysis", "BSCFS_TransferRates.json")
            cmn.printFormattedFileAsJson(pCtx, l_PathFileName, pCtx["BSCFS_TransferRates"])
        else:
            print(" No BSCFS transfers found...")
    else:
        print(" No BSCFS transfers found...")

    print("%s End: BSCFS transfer rate calculations..." % (os.linesep))

    return
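
# Note on the BSCFS handle records above (inferred from the indexing, so treat it as an
# assumption): each pCtx["BSCFS"][jobid][jobstepid][server][connection][handle] value is a
# list where index 0 carries the iteration order assigned here (0 means not yet ordered),
# index 3 the start time, index 4 the end time, and index 5 the size transferred.  Handles
# are numbered 1, 2, 3, ... per connection in ascending start time order, and each
# iteration's rate is again round((size / elapsed_seconds) / 10**9, 6) GB/sec.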