def setUp(self): """ test case setup """ self.request = Request({"RequestName": "test1", "JobID": 1}) self.operation1 = Operation({ "Type": "ReplicateAndRegister", "TargetSE": "CERN-USER" }) self.file = File({ "LFN": "/a/b/c", "ChecksumType": "ADLER32", "Checksum": "123456" }) self.request.addOperation(self.operation1) self.operation1.addFile(self.file) self.operation2 = Operation() self.operation2.Type = "RemoveFile" self.operation2.addFile(File({"LFN": "/c/d/e"})) self.request.addOperation(self.operation2) # ## set some defaults gConfig.setOptionValue('DIRAC/Setup', 'Test') gConfig.setOptionValue('/DIRAC/Setups/Test/RequestManagement', 'Test') gConfig.setOptionValue( '/Systems/RequestManagement/Test/Databases/ReqDB/Host', 'localhost') gConfig.setOptionValue( '/Systems/RequestManagement/Test/Databases/ReqDB/DBName', 'ReqDB') gConfig.setOptionValue( '/Systems/RequestManagement/Test/Databases/ReqDB/User', 'Dirac') self.i = 1000
def main(): Script.registerSwitch("N:", "NumberOfProcessors=", "Run n parallel copies of the benchmark") Script.registerSwitch("U", "Update", "Update dirac.cfg with the resulting value") Script.registerSwitch("R:", "Reconfig=", "Update given configuration file with the resulting value") Script.parseCommandLine(ignoreErrors=True) update = False configFile = None numberOfProcessors = 0 for unprocSw in Script.getUnprocessedSwitches(): if unprocSw[0] in ("U", "Update"): update = True elif unprocSw[0] in ("R", "Reconfig"): configFile = unprocSw[1] elif unprocSw[0] in ("N", "NumberOfProcessors"): try: numberOfProcessors = int(unprocSw[1]) except ValueError: gLogger.warn("Cannot make benchmark measurements: NumberOfProcessors is not a number") # if numberOfProcessors has not been provided, try to get it from the configuration if not numberOfProcessors: numberOfProcessors = gConfig.getValue("/Resources/Computing/CEDefaults/NumberOfProcessors", 1) gLogger.info("Computing benchmark measurements on", "%d processor(s)..." % numberOfProcessors) # we want to get the logs coming from db12 gLogger.enableLogsFromExternalLibs() # multiprocessor allocations generally have a CPU Power lower than single core one. # in order to avoid having wrong estimations, we run multiple copies of the benchmark simultaneously result = multiple_dirac_benchmark(numberOfProcessors) if result is None: gLogger.error("Cannot make benchmark measurements") DIRAC.exit(1) # we take a conservative approach and use the minimum value returned as the CPU Power db12Result = min(result["raw"]) # because hardware is continuously evolving, original benchmark scores might need a correction corr = Operations().getValue("JobScheduling/CPUNormalizationCorrection", 1.0) gLogger.info("Applying a correction on the CPU power:", corr) cpuPower = round(db12Result / corr, 1) gLogger.notice("Estimated CPU power is %.1f HS06" % cpuPower) if update: gConfig.setOptionValue("/LocalSite/CPUNormalizationFactor", cpuPower) if configFile: gConfig.dumpLocalCFGToFile(configFile) else: gConfig.dumpLocalCFGToFile(gConfig.diracConfigFilePath) DIRAC.exit()
def updateConfig(self, pilotStartTime=None):
    """Populate /LocalSite/MACHINEFEATURES and /LocalSite/JOBFEATURES with MJF values

    This is run early in the job to update the configuration file that subsequent
    DIRAC scripts read when they start.
    """
    if pilotStartTime:
        gConfig.setOptionValue('/LocalSite/JOBFEATURES/jobstart_secs', str(pilotStartTime))
    for mORj in ['MACHINEFEATURES', 'JOBFEATURES']:
        for key in self.mjfKeys[mORj]:
            value = self.__fetchMachineJobFeature(mORj, key)
            if value is not None:
                gConfig.setOptionValue('/LocalSite/%s/%s' % (mORj, key), value)

def checkFunction():
  """ gets the CPU normalisation from MJF or calculates it itself """
  from DIRAC.WorkloadManagementSystem.Client.CPUNormalization import getPowerFromMJF
  from ILCDIRAC.Core.Utilities.CPUNormalization import getCPUNormalization
  from DIRAC import gLogger, gConfig

  result = getCPUNormalization()
  if not result['OK']:
    gLogger.error( result['Message'] )

  norm = round( result['Value']['NORM'], 1 )
  gLogger.notice( 'Estimated CPU power is %.1f %s' % ( norm, result['Value']['UNIT'] ) )

  mjfPower = getPowerFromMJF()
  if mjfPower:
    gLogger.notice( 'CPU power from MJF is %.1f HS06' % mjfPower )
  else:
    gLogger.notice( 'MJF not available on this node' )

  # 'update' and 'configFile' are module-level flags set from the enclosing script's command-line switches
  if update and not configFile:
    gConfig.setOptionValue( '/LocalSite/CPUScalingFactor', mjfPower if mjfPower else norm )
    gConfig.setOptionValue( '/LocalSite/CPUNormalizationFactor', norm )
    gConfig.dumpLocalCFGToFile( gConfig.diracConfigFilePath )

  if configFile:
    from DIRAC.Core.Utilities.CFG import CFG
    cfg = CFG()
    try:
      # Attempt to open the given file
      cfg.loadFromFile( configFile )
    except:
      pass
    # Create the section if it does not exist
    if not cfg.existsKey( 'LocalSite' ):
      cfg.createNewSection( 'LocalSite' )
    cfg.setOption( '/LocalSite/CPUScalingFactor', mjfPower if mjfPower else norm )
    cfg.setOption( '/LocalSite/CPUNormalizationFactor', norm )
    cfg.writeToFile( configFile )

  DIRAC.exit()

def setUp(self): """ test case setup """ self.request = Request({"RequestName": "test1", "JobID": 1}) self.operation1 = Operation({"Type": "ReplicateAndRegister", "TargetSE": "CERN-USER"}) self.file = File({"LFN": "/a/b/c", "ChecksumType": "ADLER32", "Checksum": "123456"}) self.request.addOperation(self.operation1) self.operation1.addFile(self.file) self.operation2 = Operation() self.operation2.Type = "RemoveFile" self.operation2.addFile(File({"LFN": "/c/d/e"})) self.request.addOperation(self.operation2) # ## set some defaults gConfig.setOptionValue("DIRAC/Setup", "Test") gConfig.setOptionValue("/DIRAC/Setups/Test/RequestManagement", "Test") gConfig.setOptionValue("/Systems/RequestManagement/Test/Databases/ReqDB/Host", "localhost") gConfig.setOptionValue("/Systems/RequestManagement/Test/Databases/ReqDB/DBName", "ReqDB") gConfig.setOptionValue("/Systems/RequestManagement/Test/Databases/ReqDB/User", "Dirac") self.i = 1000
def main(): Script.registerSwitch("U", "Update", "Update dirac.cfg with the resulting value") Script.registerSwitch( "R:", "Reconfig=", "Update given configuration file with the resulting value") Script.parseCommandLine(ignoreErrors=True) update = False configFile = None for unprocSw in Script.getUnprocessedSwitches(): if unprocSw[0] in ("U", "Update"): update = True elif unprocSw[0] in ("R", "Reconfig"): configFile = unprocSw[1] result = singleDiracBenchmark(1) if result is None: gLogger.error('Cannot make benchmark measurements') DIRAC.exit(1) db12Measured = round(result['NORM'], 1) corr = Operations().getValue('JobScheduling/CPUNormalizationCorrection', 1.) norm = round(result['NORM'] / corr, 1) gLogger.notice('Estimated CPU power is %.1f HS06' % norm) if update: gConfig.setOptionValue('/LocalSite/CPUNormalizationFactor', norm) gConfig.setOptionValue('/LocalSite/DB12measured', db12Measured) if configFile: gConfig.dumpLocalCFGToFile(configFile) else: gConfig.dumpLocalCFGToFile(gConfig.diracConfigFilePath) DIRAC.exit()
def setUp(self): """set up the objects""" super(JobTestCase, self).setUp() from ILCDIRAC.Interfaces.API.NewInterface.Tests.LocalTestObjects import CLIParams self.basedir = os.getcwd() clip = CLIParams() clip.testOverlay=True clip.testChain = True clip.testMokka = True clip.testInputData = True clip.testWhizard = True clip.testUtilities = True overlayrun = clip.testOverlay clip.testRoot = True myMarlinSteeringFile = "bbudsc_3evt_stdreco.xml" myLCSimPreSteeringFile = "clic_cdr_prePandoraOverlay_1400.0.lcsim" if overlayrun else "clic_cdr_prePandora.lcsim" myLCSimPostSteeringFile = "clic_cdr_postPandoraOverlay.lcsim" parameterDict = dict( mokkaVersion="ILCSoft-01-17-06", mokkaSteeringFile="bbudsc_3evt.steer", detectorModel="ILD_o1_v05", machine="ilc_dbd", backgroundType="aa_lowpt", energy=350.0, marlinVersion="ILCSoft-01-17-06", marlinSteeringFile=myMarlinSteeringFile, alwaysOverlay = True, marlinInputData="/ilc/user/s/sailer/testILDsim.slcio", ildConfig = "v01-16-p10_250", gearFile='GearOutput.xml', lcsimPreSteeringFile=myLCSimPreSteeringFile, lcsimPostSteeringFile=myLCSimPostSteeringFile, ddsimVersion="ILCSoft-01-17-09", ddsimDetectorModel="CLIC_o2_v03", ddsimInputFile="Muon_50GeV_Fixed_cosTheta0.7.stdhep", inputFilesPath = 'LFN:/ilc/user/s/simoniel/stdhep_files/ttbar_3TeV/', rootVersion="ILCSoft-01-17-08" ) from ILCDIRAC.Interfaces.API.NewInterface.Tests.LocalTestObjects import TestCreater self.myTests = TestCreater(clip, parameterDict) # Differentiate between local execution and execution in docker localsitelocalarea = '' uid = os.getuid() user_info = pwd.getpwuid( uid ) homedir = os.path.join( os.sep + 'home', user_info.pw_name ) cvmfstestsdir = 'cvmfstests' if os.path.exists( homedir ): localsitelocalarea = os.path.join( homedir, cvmfstestsdir ) #os.chdir(homedir) else: localsitelocalarea = os.path.join( os.getcwd(), cvmfstestsdir ) from DIRAC import gConfig gConfig.setOptionValue( '/LocalSite/LocalArea', localsitelocalarea ) gConfig.setOptionValue( '/LocalSite/LocalSE', "CERN-DIP-4" ) #gConfig.setOptionValue( '/Operations/Defaults/AvailableTarBalls/x86_64-slc5-gcc43-opt/steeringfiles/V16/Overwrite', 'False' ) #gConfig.setOptionValue( '/Operations/Defaults/AvailableTarBalls/x86_64-slc5-gcc43-opt/steeringfiles/V18/Overwrite', 'False' ) #gConfig.setOptionValue( '/Operations/Defaults/AvailableTarBalls/x86_64-slc5-gcc43-opt/stdhepcutjava/1.0/Overwrite', 'False' ) gConfig.setOptionValue( '/Resources/Countries/local/AssignedTo' , 'ch' )
def testUserProfileDB():
    """ Some test cases """
    # building up some fake CS values
    gConfig.setOptionValue('DIRAC/Setup', 'Test')
    gConfig.setOptionValue('/DIRAC/Setups/Test/Framework', 'Test')
    host = '127.0.0.1'
    user = '******'
    pwd = 'Dirac'
    db = 'AccountingDB'
    gConfig.setOptionValue('/Systems/Framework/Test/Databases/UserProfileDB/Host', host)
    gConfig.setOptionValue('/Systems/Framework/Test/Databases/UserProfileDB/DBName', db)
    gConfig.setOptionValue('/Systems/Framework/Test/Databases/UserProfileDB/User', user)
    gConfig.setOptionValue('/Systems/Framework/Test/Databases/UserProfileDB/Password', pwd)

    db = UserProfileDB()
    assert db._connect()['OK']

    userName = '******'
    userGroup = 'testGroup'
    profileName = 'testProfile'
    varName = 'testVar'
    tagName = 'testTag'
    hashTag = '237cadc4af90277e9524e6386e264630'
    data = 'testData'
    perms = 'USER'

    try:
        if False:
            for tableName in db.tableDict.keys():
                result = db._update('DROP TABLE `%s`' % tableName)
                assert result['OK']

        gLogger.info('\n Creating Table\n')
        # Make sure it is there and it has been created for this test
        result = db._checkTable()
        assert result == {'OK': True, 'Value': None}
        result = db._checkTable()
        assert result == {'OK': True, 'Value': 0}

        gLogger.info('\n Adding some data\n')
        result = db.storeVar(userName, userGroup, profileName, varName, data, perms)
        assert result['OK']
        assert result['Value'] == 1

        gLogger.info('\n Some queries\n')
        result = db.getUserGroupIds(userName, userGroup)
        assert result['OK']
        assert result['Value'] == (1, 1, 1)
        result = db.listVars(userName, userGroup, profileName)
        assert result['OK']
        assert result['Value'][0][3] == varName
        result = db.retrieveUserProfiles(userName, userGroup)
        assert result['OK']
        assert result['Value'] == {profileName: {varName: data}}
        result = db.storeHashTag(userName, userGroup, tagName, hashTag)
        assert result['OK']
        assert result['Value'] == hashTag
        result = db.retrieveAllHashTags(userName, userGroup)
        assert result['OK']
        assert result['Value'] == {hashTag: tagName}
        result = db.retrieveHashTag(userName, userGroup, hashTag)
        assert result['OK']
        assert result['Value'] == tagName

        gLogger.info('\n OK\n')
    except AssertionError:
        print('ERROR ', end=' ')
        if not result['OK']:
            print(result['Message'])
        else:
            print(result)
        sys.exit(1)

hs06JobFeature = mjf.getJobFeature('hs06')

result = singleDiracBenchmark(1)
if result is None:
    gLogger.error('Cannot make benchmark measurements')
    DIRAC.exit(1)

db12Measured = round(result['NORM'], 1)
corr = Operations().getValue('JobScheduling/CPUNormalizationCorrection', 1.)
norm = round(result['NORM'] / corr, 1)

gLogger.notice('Estimated CPU power is %.1f HS06' % norm)

if update:
    gConfig.setOptionValue('/LocalSite/CPUScalingFactor', hs06JobFeature if hs06JobFeature else norm)  # deprecate?
    gConfig.setOptionValue('/LocalSite/CPUNormalizationFactor', norm)  # deprecate?
    gConfig.setOptionValue('/LocalSite/DB12measured', db12Measured)
    # Set DB12 to use by default. Remember db12JobFeature is still in /LocalSite/JOBFEATURES/db12
    if db12JobFeature is not None:
        gConfig.setOptionValue('/LocalSite/DB12', db12JobFeature)
    else:
        gConfig.setOptionValue('/LocalSite/DB12', db12Measured)
    if configFile:
        gConfig.dumpLocalCFGToFile(configFile)
    else:
        gConfig.dumpLocalCFGToFile(gConfig.diracConfigFilePath)

DIRAC.exit()

def testSystemLoggingDB():
    """ Some test cases """
    # building up some fake CS values
    gConfig.setOptionValue('DIRAC/Setup', 'Test')
    gConfig.setOptionValue('/DIRAC/Setups/Test/Framework', 'Test')
    host = '127.0.0.1'
    user = '******'
    pwd = 'Dirac'
    db = 'AccountingDB'
    gConfig.setOptionValue('/Systems/Framework/Test/Databases/SystemLoggingDB/Host', host)
    gConfig.setOptionValue('/Systems/Framework/Test/Databases/SystemLoggingDB/DBName', db)
    gConfig.setOptionValue('/Systems/Framework/Test/Databases/SystemLoggingDB/User', user)
    gConfig.setOptionValue('/Systems/Framework/Test/Databases/SystemLoggingDB/Password', pwd)

    from DIRAC.FrameworkSystem.private.logging.Message import tupleToMessage

    systemName = 'TestSystem'
    subSystemName = 'TestSubSystem'
    level = 10
    time = Time.toString()
    msgTest = 'Hello'
    variableText = time
    frameInfo = ""
    message = tupleToMessage((systemName, level, time, msgTest, variableText, frameInfo, subSystemName))
    site = 'somewehere'
    longSite = 'somewehere1234567890123456789012345678901234567890123456789012345678901234567890'
    nodeFQDN = '127.0.0.1'
    userDN = 'Yo'
    userGroup = 'Us'
    remoteAddress = 'elsewhere'
    records = 10

    db = SystemLoggingDB()
    assert db._connect()['OK']

    try:
        if False:
            for tableName in db.tableDict.keys():
                result = db._update('DROP TABLE IF EXISTS `%s`' % tableName)
                assert result['OK']

        gLogger.info('\n Creating Table\n')
        # Make sure it is there and it has been created for this test
        result = db._checkTable()
        assert result['OK']
        result = db._checkTable()
        assert not result['OK']
        assert result['Message'] == 'The requested table already exist'

        gLogger.info('\n Inserting some records\n')
        for k in range(records):
            result = db.insertMessage(message, site, nodeFQDN, userDN, userGroup, remoteAddress)
            assert result['OK']
            assert result['lastRowId'] == k + 1
            assert result['Value'] == 1

        result = db.insertMessage(message, longSite, nodeFQDN, userDN, userGroup, remoteAddress)
        assert not result['OK']

        result = db._queryDB(showFieldList=['SiteName'])
        assert result['OK']
        assert result['Value'][0][0] == site
        result = db._queryDB(showFieldList=['SystemName'])
        assert result['OK']
        assert result['Value'][0][0] == systemName
        result = db._queryDB(showFieldList=['SubSystemName'])
        assert result['OK']
        assert result['Value'][0][0] == subSystemName
        result = db._queryDB(showFieldList=['OwnerGroup'])
        assert result['OK']
        assert result['Value'][0][0] == userGroup
        result = db._queryDB(showFieldList=['FixedTextString'])
        assert result['OK']
        assert result['Value'][0][0] == msgTest
        result = db._queryDB(showFieldList=['VariableText', 'SiteName'], count=True, groupColumn='VariableText')
        assert result['OK']
        assert result['Value'][0][1] == site
        assert result['Value'][0][2] == records

        gLogger.info('\n Removing Table\n')
        for tableName in ['MessageRepository', 'FixedTextMessages', 'SubSystems', 'Systems',
                          'AgentPersistentData', 'ClientIPs', 'Sites', 'UserDNs']:
            result = db._update('DROP TABLE `%s`' % tableName)
            assert result['OK']

        gLogger.info('\n OK\n')
    except AssertionError:
        print 'ERROR ',
        if not result['OK']:
            print result['Message']
        else:
            print result
        sys.exit(1)

def test(): """ Some test cases """ # building up some fake CS values gConfig.setOptionValue('DIRAC/Setup', 'Test') gConfig.setOptionValue('/DIRAC/Setups/Test/DataManagement', 'Test') host = '127.0.0.1' user = '******' pwd = 'Dirac' db = 'AccountingDB' gConfig.setOptionValue( '/Systems/DataManagement/Test/Databases/DataIntegrityDB/Host', host) gConfig.setOptionValue( '/Systems/DataManagement/Test/Databases/DataIntegrityDB/DBName', db) gConfig.setOptionValue( '/Systems/DataManagement/Test/Databases/DataIntegrityDB/User', user) gConfig.setOptionValue( '/Systems/DataManagement/Test/Databases/DataIntegrityDB/Password', pwd) diDB = DataIntegrityDB() assert diDB._connect()['OK'] source = 'Test' prognosis = 'TestError' prodID = 1234 lfn = '/Test/%08d/File1' % prodID fileMetadata1 = { lfn: { 'Prognosis': prognosis, 'PFN': 'File1', 'SE': 'Test-SE' } } fileOut1 = { 'FileID': 1L, 'LFN': lfn, 'PFN': 'File1', 'Prognosis': prognosis, 'GUID': None, 'SE': 'Test-SE', 'Size': None } newStatus = 'Solved' newPrognosis = 'AnotherError' try: gLogger.info('\n Creating Table\n') # Make sure it is there and it has been created for this test result = diDB._checkTable() assert result['OK'] result = diDB._checkTable() assert not result['OK'] assert result['Message'] == 'The requested table already exist' result = diDB.insertProblematic(source, fileMetadata1) assert result['OK'] assert result['Value'] == {'Successful': {lfn: True}, 'Failed': {}} result = diDB.insertProblematic(source, fileMetadata1) assert result['OK'] assert result['Value'] == { 'Successful': { lfn: 'Already exists' }, 'Failed': {} } result = diDB.getProblematicsSummary() assert result['OK'] assert result['Value'] == {'TestError': {'New': 1}} result = diDB.getDistinctPrognosis() assert result['OK'] assert result['Value'] == ['TestError'] result = diDB.getProblematic() assert result['OK'] assert result['Value'] == fileOut1 result = diDB.incrementProblematicRetry(result['Value']['FileID']) assert result['OK'] assert result['Value'] == 1 result = diDB.getProblematic() assert result['OK'] assert result['Value'] == fileOut1 result = diDB.getPrognosisProblematics(prognosis) assert result['OK'] assert result['Value'] == [fileOut1] result = diDB.getTransformationProblematics(prodID) assert result['OK'] assert result['Value'][lfn] == 1 result = diDB.setProblematicStatus(1, newStatus) assert result['OK'] assert result['Value'] == 1 result = diDB.changeProblematicPrognosis(1, newPrognosis) assert result['OK'] assert result['Value'] == 1 result = diDB.getPrognosisProblematics(prognosis) assert result['OK'] assert result['Value'] == [] result = diDB.removeProblematic(1) assert result['OK'] assert result['Value'] == 1 result = diDB.getProblematicsSummary() assert result['OK'] assert result['Value'] == {} gLogger.info('\n Removing Table\n') result = diDB._update('DROP TABLE `%s`' % diDB.tableName) assert result['OK'] gLogger.info('\n OK\n') except AssertionError: print 'ERROR ', if not result['OK']: print result['Message'] else: print result sys.exit(1)
def testUserProfileDB():
  """ Some test cases """
  # building up some fake CS values
  gConfig.setOptionValue( 'DIRAC/Setup', 'Test' )
  gConfig.setOptionValue( '/DIRAC/Setups/Test/Framework', 'Test' )
  host = '127.0.0.1'
  user = '******'
  pwd = 'Dirac'
  db = 'AccountingDB'
  gConfig.setOptionValue( '/Systems/Framework/Test/Databases/UserProfileDB/Host', host )
  gConfig.setOptionValue( '/Systems/Framework/Test/Databases/UserProfileDB/DBName', db )
  gConfig.setOptionValue( '/Systems/Framework/Test/Databases/UserProfileDB/User', user )
  gConfig.setOptionValue( '/Systems/Framework/Test/Databases/UserProfileDB/Password', pwd )

  db = UserProfileDB()
  assert db._connect()['OK']

  userName = '******'
  userGroup = 'testGroup'
  profileName = 'testProfile'
  varName = 'testVar'
  tagName = 'testTag'
  hashTag = '237cadc4af90277e9524e6386e264630'
  data = 'testData'
  perms = 'USER'

  try:
    if False:
      for tableName in db.tableDict.keys():
        result = db._update( 'DROP TABLE `%s`' % tableName )
        assert result['OK']

    gLogger.info( '\n Creating Table\n' )
    # Make sure it is there and it has been created for this test
    result = db._checkTable()
    assert result == {'OK': True, 'Value': None}
    result = db._checkTable()
    assert result == {'OK': True, 'Value': 0}

    gLogger.info( '\n Adding some data\n' )
    result = db.storeVar( userName, userGroup, profileName, varName, data, perms )
    assert result['OK']
    assert result['Value'] == 1

    gLogger.info( '\n Some queries\n' )
    result = db.getUserGroupIds( userName, userGroup )
    assert result['OK']
    assert result['Value'] == ( 1, 1, 1 )
    result = db.listVars( userName, userGroup, profileName )
    assert result['OK']
    assert result['Value'][0][3] == varName
    result = db.retrieveUserProfiles( userName, userGroup )
    assert result['OK']
    assert result['Value'] == { profileName: { varName: data } }
    result = db.storeHashTag( userName, userGroup, tagName, hashTag )
    assert result['OK']
    assert result['Value'] == hashTag
    result = db.retrieveAllHashTags( userName, userGroup )
    assert result['OK']
    assert result['Value'] == { hashTag: tagName }
    result = db.retrieveHashTag( userName, userGroup, hashTag )
    assert result['OK']
    assert result['Value'] == tagName

    gLogger.info( '\n OK\n' )
  except AssertionError:
    print 'ERROR ',
    if not result['OK']:
      print result['Message']
    else:
      print result
    sys.exit( 1 )

if not result['OK']:
  gLogger.error( result['Message'] )

norm = int( ( result['Value']['NORM'] + 0.05 ) * 10 ) / 10.
gLogger.notice( 'Estimated CPU power is %.1f %s' % ( norm, result['Value']['UNIT'] ) )

mjfPower = getPowerFromMJF()
if mjfPower:
  gLogger.notice( 'CPU power from MJF is %.1f HS06' % mjfPower )
else:
  gLogger.notice( 'MJF not available on this node' )

if update and not configFile:
  gConfig.setOptionValue( '/LocalSite/CPUScalingFactor', mjfPower if mjfPower else norm )
  gConfig.setOptionValue( '/LocalSite/CPUNormalizationFactor', norm )
  gConfig.dumpLocalCFGToFile( gConfig.diracConfigFilePath )

if configFile:
  from DIRAC.Core.Utilities.CFG import CFG
  cfg = CFG()
  try:
    # Attempt to open the given file
    cfg.loadFromFile( configFile )
  except:
    pass
  # Create the section if it does not exist
  if not cfg.existsKey( 'LocalSite' ):
    cfg.createNewSection( 'LocalSite' )
  cfg.setOption( '/LocalSite/CPUScalingFactor', mjfPower if mjfPower else norm )

gLogger.error( " There was a problem generating proxy to be uploaded proxy manager: %s" % retVal[ 'Message' ] ) return False return True if __name__ == "__main__": cliParams = CLIParams() cliParams.registerCLISwitches() Script.setUsageMessage( '\n'.join( [ __doc__.split( '\n' )[1], 'Usage:', ' %s [option|cfgfile] ...' % Script.scriptName, ] ) ) Script.disableCS() Script.parseCommandLine() gConfig.setOptionValue( "/DIRAC/Security/UseServerCertificate", "no" ) diracGroup = cliParams.getDIRACGroup() time = cliParams.getProxyLifeTime() retVal = generateProxy( cliParams ) if not retVal[ 'OK' ]: gLogger.error( "Can't create a proxy: %s" % retVal[ 'Message' ] ) sys.exit( 1 ) gLogger.info( "Proxy created" ) Script.enableCS() retVal = getProxyInfo( retVal[ 'Value' ] ) if not retVal[ 'OK' ]: gLogger.error( "Can't create a proxy: %s" % retVal[ 'Message' ] )
def test(): """ Some test cases """ # building up some fake CS values gConfig.setOptionValue('DIRAC/Setup', 'Test') gConfig.setOptionValue('/DIRAC/Setups/Test/DataManagement', 'Test') host = '127.0.0.1' user = '******' pwd = 'Dirac' db = 'AccountingDB' gConfig.setOptionValue( '/Systems/DataManagement/Test/Databases/DataLoggingDB/Host', host) gConfig.setOptionValue( '/Systems/DataManagement/Test/Databases/DataLoggingDB/DBName', db) gConfig.setOptionValue( '/Systems/DataManagement/Test/Databases/DataLoggingDB/User', user) gConfig.setOptionValue( '/Systems/DataManagement/Test/Databases/DataLoggingDB/Password', pwd) db = DataLoggingDB() assert db._connect()['OK'] lfns = ['/Test/00001234/File1', '/Test/00001234/File2'] status = 'TestStatus' minor = 'MinorStatus' date1 = Time.toString() date2 = Time.dateTime() source = 'Somewhere' fileTuples = ((lfns[0], status, minor, date1, source), (lfns[1], status, minor, date2, source)) try: gLogger.info('\n Creating Table\n') # Make sure it is there and it has been created for this test result = db._checkTable() assert result['OK'] result = db._checkTable() assert not result['OK'] assert result['Message'] == 'The requested table already exist' gLogger.info('\n Inserting some records\n') result = db.addFileRecord(lfns, status, date='2012-04-28 09:49:02.545466') assert result['OK'] assert result['Value'] == 2 assert result['lastRowId'] == 2 result = db.addFileRecords(fileTuples) assert result['OK'] gLogger.info('\n Retrieving some records\n') result = db.getFileLoggingInfo(lfns[0]) assert result['OK'] assert len(result['Value']) == 2 result = db.getFileLoggingInfo(lfns[1]) assert result['OK'] assert len(result['Value']) == 2 result = db.getUniqueStates() assert result['OK'] assert result['Value'] == [status] gLogger.info('\n Removing Table\n') result = db._update('DROP TABLE `%s`' % db.tableName) assert result['OK'] gLogger.info('\n OK\n') except AssertionError: print 'ERROR ', if not result['OK']: print result['Message'] else: print result sys.exit(1)
def test(): """ Some test cases """ host = '127.0.0.1' user = '******' pwd = 'Dirac' db = 'DataIntegrityDB' gConfig.setOptionValue( '/Systems/DataManagement/Test/Databases/DataIntegrityDB/Host', host ) gConfig.setOptionValue( '/Systems/DataManagement/Test/Databases/DataIntegrityDB/DBName', db ) gConfig.setOptionValue( '/Systems/DataManagement/Test/Databases/DataIntegrityDB/User', user ) gConfig.setOptionValue( '/Systems/DataManagement/Test/Databases/DataIntegrityDB/Password', pwd ) diDB = DataIntegrityDB() assert diDB._connect()['OK'] source = 'Test' prognosis = 'TestError' prodID = 1234 lfn = '/Test/%08d/File1' % prodID fileMetadata1 = {lfn: {'Prognosis': prognosis, 'PFN': 'File1', 'SE': 'Test-SE'}} fileOut1 = {'FileID': 1L, 'LFN': lfn, 'PFN': 'File1', 'Prognosis': prognosis, 'GUID': None, 'SE': 'Test-SE', 'Size': None} newStatus = 'Solved' newPrognosis = 'AnotherError' try: gLogger.info( '\n Creating Table\n' ) # Make sure it is there and it has been created for this test result = diDB._checkTable() assert result['OK'] result = diDB._checkTable() assert not result['OK'] assert result['Message'] == 'The requested table already exist' result = diDB.insertProblematic( source, fileMetadata1 ) assert result['OK'] assert result['Value'] == {'Successful': {lfn: True}, 'Failed': {}} result = diDB.insertProblematic( source, fileMetadata1 ) assert result['OK'] assert result['Value'] == {'Successful': {lfn: 'Already exists'}, 'Failed': {}} result = diDB.getProblematicsSummary() assert result['OK'] assert result['Value'] == {'TestError': {'New': 1}} result = diDB.getDistinctPrognosis() assert result['OK'] assert result['Value'] == ['TestError'] result = diDB.getProblematic() assert result['OK'] assert result['Value'] == fileOut1 result = diDB.incrementProblematicRetry( result['Value']['FileID'] ) assert result['OK'] assert result['Value'] == 1 result = diDB.getProblematic() assert result['OK'] assert result['Value'] == fileOut1 result = diDB.getPrognosisProblematics( prognosis ) assert result['OK'] assert result['Value'] == [fileOut1] result = diDB.getTransformationProblematics( prodID ) assert result['OK'] assert result['Value'][lfn] == 1 result = diDB.setProblematicStatus( 1, newStatus ) assert result['OK'] assert result['Value'] == 1 result = diDB.changeProblematicPrognosis( 1, newPrognosis ) assert result['OK'] assert result['Value'] == 1 result = diDB.getPrognosisProblematics( prognosis ) assert result['OK'] assert result['Value'] == [] result = diDB.removeProblematic( 1 ) assert result['OK'] assert result['Value'] == 1 result = diDB.getProblematicsSummary() assert result['OK'] assert result['Value'] == {} gLogger.info( '\n Removing Table\n' ) result = diDB._update( 'DROP TABLE `%s`' % diDB.tableName ) assert result['OK'] gLogger.info( '\n OK\n' ) except AssertionError: print 'ERROR ', if not result['OK']: print result['Message'] else: print result sys.exit( 1 )
if not result['OK']:
    gLogger.error(result['Message'])

norm = int((result['Value']['NORM'] + 0.05) * 10) / 10.
gLogger.notice('Estimated CPU power is %.1f %s' % (norm, result['Value']['UNIT']))

mjfPower = getPowerFromMJF()
if mjfPower:
    gLogger.notice('CPU power from MJF is %.1f HS06' % mjfPower)
else:
    gLogger.notice('MJF not available on this node')

if update and not configFile:
    gConfig.setOptionValue('/LocalSite/CPUScalingFactor', mjfPower if mjfPower else norm)
    gConfig.setOptionValue('/LocalSite/CPUNormalizationFactor', norm)
    gConfig.dumpLocalCFGToFile(gConfig.diracConfigFilePath)

if configFile:
    from DIRAC.Core.Utilities.CFG import CFG
    cfg = CFG()
    try:
        # Attempt to open the given file
        cfg.loadFromFile(configFile)
    except:
        pass
    # Create the section if it does not exist
    if not cfg.existsKey('LocalSite'):
        cfg.createNewSection('LocalSite')
    cfg.setOption('/LocalSite/CPUScalingFactor', mjfPower if mjfPower else norm)

def setUp( self ): """ set up :param self: self reference """ self.log = gLogger.getSubLogger( self.__class__.__name__ ) if not self.__db: gConfig.setOptionValue( "/DIRAC/Setup", "Test" ) gConfig.setOptionValue( "/DIRAC/Setups/Test/DataManagement", "Test" ) spath = "/Systems/DataManagement/Test/Databases/DataLoggingDB" gConfig.setOptionValue( "%s/%s" % ( spath, "Host" ), "127.0.0.1" ) gConfig.setOptionValue( "%s/%s" % ( spath, "DBName" ), "AccountingDB" ) gConfig.setOptionValue( "%s/%s" % ( spath, "User" ), "Dirac" ) gConfig.setOptionValue( "%s/%s" % ( spath, "Password" ), "Dirac" ) self.__db = DataLoggingDB()
def setUp(self): """ test case set up """ # ## set some defaults gConfig.setOptionValue('DIRAC/Setup', 'Test') gConfig.setOptionValue('/DIRAC/Setups/Test/DataManagement', 'Test') gConfig.setOptionValue( '/Systems/DataManagement/Test/Databases/FTSDB/Host', 'localhost') gConfig.setOptionValue( '/Systems/DataManagement/Test/Databases/FTSDB/DBName', 'FTSDB') gConfig.setOptionValue( '/Systems/DataManagement/Test/Databases/FTSDB/User', 'Dirac') self.ftsSites = [ FTSSite({ "FTSServer": "https://fts22-t0-export.cern.ch:8443/glite-data-transfer-fts/services/FileTransfer", "Name": "CERN.ch" }), FTSSite({ "FTSServer": "https://fts.pic.es:8443/glite-data-transfer-fts/services/FileTransfer", "Name": "PIC.es" }), FTSSite({ "FTSServer": "https://lcgfts.gridpp.rl.ac.uk:8443/glite-data-transfer-fts/services/FileTransfer", "Name": "RAL.uk" }), FTSSite({ "FTSServer": "https://fts.grid.sara.nl:8443/glite-data-transfer-fts/services/FileTransfer", "Name": "SARA.nl" }), FTSSite({ "FTSServer": "https://fts.cr.cnaf.infn.it:8443/glite-data-transfer-fts/services/FileTransfer", "Name": "CNAF.it" }), FTSSite({ "FTSServer": "https://fts.grid.sara.nl:8443/glite-data-transfer-fts/services/FileTransfer", "Name": "NIKHEF.nl" }), FTSSite({ "FTSServer": "https://fts-fzk.gridka.de:8443/glite-data-transfer-fts/services/FileTransfer", "Name": "GRIDKA.de" }), FTSSite({ "FTSServer": "https://cclcgftsprod.in2p3.fr:8443/glite-data-transfer-fts/services/FileTransfer", "Name": "IN2P3.fr" }) ] self.ftsFiles = [] for i in range(100): ftsFile = FTSFile() ftsFile.FileID = i + 1 ftsFile.OperationID = 9999 ftsFile.LFN = "/a/b/c/%d" % i ftsFile.Size = 10 ftsFile.SourceSE = "CERN-USER" ftsFile.TargetSE = "PIC-USER" ftsFile.SourceSURL = "foo://source.bar.baz/%s" % ftsFile.LFN ftsFile.TargetSURL = "foo://target.bar.baz/%s" % ftsFile.LFN ftsFile.Status = "Waiting" self.ftsFiles.append(ftsFile) ses = ["CERN-USER", "RAL-USER"] statuses = [ "Submitted", "Finished", "FinishedDirty", "Active", "Ready" ] self.submitted = 0 self.ftsJobs = [] for i in range(7200): ftsJob = FTSJob() ftsJob.FTSGUID = str(uuid.uuid4()) ftsJob.FTSServer = self.ftsSites[0].FTSServer ftsJob.Status = statuses[i % len(statuses)] if ftsJob.Status in FTSJob.FINALSTATES: ftsJob.Completeness = 100 if ftsJob.Status == "Active": ftsJob.Completeness = 90 ftsJob.SourceSE = ses[i % len(ses)] ftsJob.TargetSE = "PIC-USER" ftsFile = FTSFile() ftsFile.FileID = i + 1 ftsFile.OperationID = i + 1 ftsFile.LFN = "/a/b/c/%d" % i ftsFile.Size = 1000000 ftsFile.SourceSE = ftsJob.SourceSE ftsFile.TargetSE = ftsJob.TargetSE ftsFile.SourceSURL = "foo://source.bar.baz/%s" % ftsFile.LFN ftsFile.TargetSURL = "foo://target.bar.baz/%s" % ftsFile.LFN ftsFile.Status = "Waiting" if ftsJob.Status != "FinishedDirty" else "Failed" ftsFile.FTSGUID = ftsJob.FTSGUID if ftsJob.Status == "FinishedDirty": ftsJob.FailedFiles = 1 ftsJob.FailedSize = ftsFile.Size ftsJob.addFile(ftsFile) self.ftsJobs.append(ftsJob) self.submitted = len( [i for i in self.ftsJobs if i.Status == "Submitted"])
def testUserProfileDB():
    """ Some test cases """
    # building up some fake CS values
    gConfig.setOptionValue("DIRAC/Setup", "Test")
    gConfig.setOptionValue("/DIRAC/Setups/Test/Framework", "Test")
    host = "127.0.0.1"
    user = "******"
    pwd = "Dirac"
    db = "AccountingDB"
    gConfig.setOptionValue("/Systems/Framework/Test/Databases/UserProfileDB/Host", host)
    gConfig.setOptionValue("/Systems/Framework/Test/Databases/UserProfileDB/DBName", db)
    gConfig.setOptionValue("/Systems/Framework/Test/Databases/UserProfileDB/User", user)
    gConfig.setOptionValue("/Systems/Framework/Test/Databases/UserProfileDB/Password", pwd)

    db = UserProfileDB()
    assert db._connect()["OK"]

    userName = "******"
    userGroup = "testGroup"
    profileName = "testProfile"
    varName = "testVar"
    tagName = "testTag"
    hashTag = "237cadc4af90277e9524e6386e264630"
    data = "testData"
    perms = "USER"

    try:
        if False:
            for tableName in db.tableDict.keys():
                result = db._update("DROP TABLE `%s`" % tableName)
                assert result["OK"]

        gLogger.info("\n Creating Table\n")
        # Make sure it is there and it has been created for this test
        result = db._checkTable()
        assert result == {"OK": True, "Value": None}
        result = db._checkTable()
        assert result == {"OK": True, "Value": 0}

        gLogger.info("\n Adding some data\n")
        result = db.storeVar(userName, userGroup, profileName, varName, data, perms)
        assert result["OK"]
        assert result["Value"] == 1

        gLogger.info("\n Some queries\n")
        result = db.getUserGroupIds(userName, userGroup)
        assert result["OK"]
        assert result["Value"] == (1, 1, 1)
        result = db.listVars(userName, userGroup, profileName)
        assert result["OK"]
        assert result["Value"][0][3] == varName
        result = db.retrieveUserProfiles(userName, userGroup)
        assert result["OK"]
        assert result["Value"] == {profileName: {varName: data}}
        result = db.storeHashTag(userName, userGroup, tagName, hashTag)
        assert result["OK"]
        assert result["Value"] == hashTag
        result = db.retrieveAllHashTags(userName, userGroup)
        assert result["OK"]
        assert result["Value"] == {hashTag: tagName}
        result = db.retrieveHashTag(userName, userGroup, hashTag)
        assert result["OK"]
        assert result["Value"] == tagName

        gLogger.info("\n OK\n")
    except AssertionError:
        print "ERROR ",
        if not result["OK"]:
            print result["Message"]
        else:
            print result
        sys.exit(1)

def setUp(self): """set up the objects""" super(JobTestCase, self).setUp() clip = CLIParams() clip.testOverlay=True clip.testChain = True clip.testMokka = True clip.testInputData = True clip.testWhizard = True clip.testUtilities = True overlayrun = clip.testOverlay clip.testRoot = True myMarlinSteeringFile = "bbudsc_3evt_stdreco.xml" myLCSimPreSteeringFile = "clic_cdr_prePandoraOverlay_1400.0.lcsim" if overlayrun else "clic_cdr_prePandora.lcsim" myLCSimPostSteeringFile = "clic_cdr_postPandoraOverlay.lcsim" parameterDict = dict( mokkaVersion="ILCSoft-01-17-06", mokkaSteeringFile="bbudsc_3evt.steer", detectorModel="ILD_o1_v05", machine="ilc_dbd", backgroundType="aa_lowpt", energy=350.0, marlinVersion="ILCSoft-01-17-06", marlinSteeringFile=myMarlinSteeringFile, alwaysOverlay = True, marlinInputData="/ilc/user/s/sailer/testILDsim.slcio", ildConfig = "v01-16-p10_250", gearFile='GearOutput.xml', lcsimPreSteeringFile=myLCSimPreSteeringFile, lcsimPostSteeringFile=myLCSimPostSteeringFile, rootVersion="ILCSoft-01-17-08" ) self.myTests = TestCreater(clip, parameterDict) # Differentiate between local execution and execution in docker localsitelocalarea = '' if os.path.exists("/home/jebbing/"): localsitelocalarea = "/home/jebbing/cvmfstests" else: localsitelocalarea = os.path.join(os.getcwd(), "cvmfstests" ) gConfig.setOptionValue( '/LocalSite/LocalArea', localsitelocalarea) gConfig.setOptionValue( '/LocalSite/LocalSE', "CERN-DIP-4" ) gConfig.setOptionValue( '/Operations/Defaults/AvailableTarBalls/x86_64-slc5-gcc43-opt/steeringfiles/V16/Overwrite', 'False' ) gConfig.setOptionValue( '/Operations/Defaults/AvailableTarBalls/x86_64-slc5-gcc43-opt/steeringfiles/V18/Overwrite', 'False' ) gConfig.setOptionValue( '/Operations/Defaults/AvailableTarBalls/x86_64-slc5-gcc43-opt/stdhepcutjava/1.0/Overwrite', 'False' ) gConfig.setOptionValue( '/Resources/Countries/local/AssignedTo' , 'ch' )
def testSystemLoggingDB():
  """ Some test cases """
  # building up some fake CS values
  gConfig.setOptionValue( 'DIRAC/Setup', 'Test' )
  gConfig.setOptionValue( '/DIRAC/Setups/Test/Framework', 'Test' )
  host = '127.0.0.1'
  user = '******'
  pwd = 'Dirac'
  db = 'AccountingDB'
  gConfig.setOptionValue( '/Systems/Framework/Test/Databases/SystemLoggingDB/Host', host )
  gConfig.setOptionValue( '/Systems/Framework/Test/Databases/SystemLoggingDB/DBName', db )
  gConfig.setOptionValue( '/Systems/Framework/Test/Databases/SystemLoggingDB/User', user )
  gConfig.setOptionValue( '/Systems/Framework/Test/Databases/SystemLoggingDB/Password', pwd )

  from DIRAC.FrameworkSystem.private.logging.Message import tupleToMessage

  systemName = 'TestSystem'
  subSystemName = 'TestSubSystem'
  level = 10
  time = Time.toString()
  msgTest = 'Hello'
  variableText = time
  frameInfo = ""
  message = tupleToMessage( ( systemName, level, time, msgTest, variableText, frameInfo, subSystemName ) )
  site = 'somewehere'
  longSite = 'somewehere1234567890123456789012345678901234567890123456789012345678901234567890'
  nodeFQDN = '127.0.0.1'
  userDN = 'Yo'
  userGroup = 'Us'
  remoteAddress = 'elsewhere'
  records = 10

  db = SystemLoggingDB()
  assert db._connect()['OK']

  try:
    if False:
      for tableName in db.tableDict.keys():
        result = db._update( 'DROP TABLE IF EXISTS `%s`' % tableName )
        assert result['OK']

    gLogger.info( '\n Creating Table\n' )
    # Make sure it is there and it has been created for this test
    result = db._checkTable()
    assert result['OK']
    result = db._checkTable()
    assert not result['OK']
    assert result['Message'] == 'The requested table already exist'

    gLogger.info( '\n Inserting some records\n' )
    for k in range( records ):
      result = db.insertMessage( message, site, nodeFQDN, userDN, userGroup, remoteAddress )
      assert result['OK']
      assert result['lastRowId'] == k + 1
      assert result['Value'] == 1

    result = db.insertMessage( message, longSite, nodeFQDN, userDN, userGroup, remoteAddress )
    assert not result['OK']

    result = db._queryDB( showFieldList = [ 'SiteName' ] )
    assert result['OK']
    assert result['Value'][0][0] == site
    result = db._queryDB( showFieldList = [ 'SystemName' ] )
    assert result['OK']
    assert result['Value'][0][0] == systemName
    result = db._queryDB( showFieldList = [ 'SubSystemName' ] )
    assert result['OK']
    assert result['Value'][0][0] == subSystemName
    result = db._queryDB( showFieldList = [ 'OwnerGroup' ] )
    assert result['OK']
    assert result['Value'][0][0] == userGroup
    result = db._queryDB( showFieldList = [ 'FixedTextString' ] )
    assert result['OK']
    assert result['Value'][0][0] == msgTest
    result = db._queryDB( showFieldList = [ 'VariableText', 'SiteName' ], count = True, groupColumn = 'VariableText' )
    assert result['OK']
    assert result['Value'][0][1] == site
    assert result['Value'][0][2] == records

    gLogger.info( '\n Removing Table\n' )
    for tableName in [ 'MessageRepository', 'FixedTextMessages', 'SubSystems', 'Systems',
                       'AgentPersistentData', 'ClientIPs', 'Sites', 'UserDNs' ]:
      result = db._update( 'DROP TABLE `%s`' % tableName )
      assert result['OK']

    gLogger.info( '\n OK\n' )
  except AssertionError:
    print 'ERROR ',
    if not result['OK']:
      print result['Message']
    else:
      print result
    sys.exit( 1 )

def disableDevMode( op ):
  gConfig.setOptionValue( "/WebApp/DevelopMode", "False" )
  return S_OK()

result = singleDiracBenchmark(1)
if result is None:
    gLogger.error('Cannot make benchmark measurements')
    DIRAC.exit(1)

db12Measured = round(result['NORM'], 1)
corr = Operations().getValue('JobScheduling/CPUNormalizationCorrection', 1.)
norm = round(result['NORM'] / corr, 1)

gLogger.notice('Estimated CPU power is %.1f HS06' % norm)

if update:
    gConfig.setOptionValue('/LocalSite/CPUScalingFactor', hs06JobFeature if hs06JobFeature else norm)  # deprecate?
    gConfig.setOptionValue('/LocalSite/CPUNormalizationFactor', norm)  # deprecate?
    gConfig.setOptionValue('/LocalSite/DB12measured', db12Measured)
    # Set DB12 to use by default. Remember db12JobFeature is still in /LocalSite/JOBFEATURES/db12
    if db12JobFeature is not None:
        gConfig.setOptionValue('/LocalSite/DB12', db12JobFeature)
    else:
        gConfig.setOptionValue('/LocalSite/DB12', db12Measured)
    if configFile:
        gConfig.dumpLocalCFGToFile(configFile)
    else:
        gConfig.dumpLocalCFGToFile(gConfig.diracConfigFilePath)

def test(): """ Some test cases """ # building up some fake CS values gConfig.setOptionValue( 'DIRAC/Setup', 'Test' ) gConfig.setOptionValue( '/DIRAC/Setups/Test/DataManagement', 'Test' ) host = '127.0.0.1' user = '******' pwd = 'Dirac' db = 'AccountingDB' gConfig.setOptionValue( '/Systems/DataManagement/Test/Databases/DataLoggingDB/Host', host ) gConfig.setOptionValue( '/Systems/DataManagement/Test/Databases/DataLoggingDB/DBName', db ) gConfig.setOptionValue( '/Systems/DataManagement/Test/Databases/DataLoggingDB/User', user ) gConfig.setOptionValue( '/Systems/DataManagement/Test/Databases/DataLoggingDB/Password', pwd ) db = DataLoggingDB() assert db._connect()['OK'] lfns = ['/Test/00001234/File1', '/Test/00001234/File2'] status = 'TestStatus' minor = 'MinorStatus' date1 = Time.toString() date2 = Time.dateTime() source = 'Somewhere' fileTuples = ( ( lfns[0], status, minor, date1, source ), ( lfns[1], status, minor, date2, source ) ) try: gLogger.info( '\n Creating Table\n' ) # Make sure it is there and it has been created for this test result = db._checkTable() assert result['OK'] result = db._checkTable() assert not result['OK'] assert result['Message'] == 'The requested table already exist' gLogger.info( '\n Inserting some records\n' ) result = db.addFileRecord( lfns, status, date = '2012-04-28 09:49:02.545466' ) assert result['OK'] assert result['Value'] == 2 assert result['lastRowId'] == 2 result = db.addFileRecords( fileTuples ) assert result['OK'] gLogger.info( '\n Retrieving some records\n' ) result = db.getFileLoggingInfo( lfns[0] ) assert result['OK'] assert len( result['Value'] ) == 2 result = db.getFileLoggingInfo( lfns[1] ) assert result['OK'] assert len( result['Value'] ) == 2 result = db.getUniqueStates() assert result['OK'] assert result['Value'] == [status] gLogger.info( '\n Removing Table\n' ) result = db._update( 'DROP TABLE `%s`' % db.tableName ) assert result['OK'] gLogger.info( '\n OK\n' ) except AssertionError: print 'ERROR ', if not result['OK']: print result['Message'] else: print result sys.exit( 1 )
def setUp( self ): """ test case set up """ # ## set some defaults gConfig.setOptionValue( 'DIRAC/Setup', 'Test' ) gConfig.setOptionValue( '/DIRAC/Setups/Test/DataManagement', 'Test' ) gConfig.setOptionValue( '/Systems/DataManagement/Test/Databases/FTSDB/Host', 'localhost' ) gConfig.setOptionValue( '/Systems/DataManagement/Test/Databases/FTSDB/DBName', 'FTSDB' ) gConfig.setOptionValue( '/Systems/DataManagement/Test/Databases/FTSDB/User', 'Dirac' ) self.ftsSites = [ FTSSite( { "FTSServer": "https://fts22-t0-export.cern.ch:8443/glite-data-transfer-fts/services/FileTransfer", "Name": "CERN.ch" } ), FTSSite( { "FTSServer": "https://fts.pic.es:8443/glite-data-transfer-fts/services/FileTransfer", "Name": "PIC.es" } ), FTSSite( { "FTSServer": "https://lcgfts.gridpp.rl.ac.uk:8443/glite-data-transfer-fts/services/FileTransfer", "Name": "RAL.uk" } ), FTSSite( { "FTSServer": "https://fts.grid.sara.nl:8443/glite-data-transfer-fts/services/FileTransfer", "Name": "SARA.nl" } ), FTSSite( { "FTSServer": "https://fts.cr.cnaf.infn.it:8443/glite-data-transfer-fts/services/FileTransfer", "Name": "CNAF.it" } ), FTSSite( { "FTSServer": "https://fts.grid.sara.nl:8443/glite-data-transfer-fts/services/FileTransfer", "Name": "NIKHEF.nl" } ), FTSSite( { "FTSServer": "https://fts-fzk.gridka.de:8443/glite-data-transfer-fts/services/FileTransfer", "Name": "GRIDKA.de" } ), FTSSite( { "FTSServer": "https://cclcgftsprod.in2p3.fr:8443/glite-data-transfer-fts/services/FileTransfer", "Name": "IN2P3.fr" } ) ] self.ftsFiles = [] for i in range ( 100 ): ftsFile = FTSFile() ftsFile.FileID = i + 1 ftsFile.OperationID = 9999 ftsFile.LFN = "/a/b/c/%d" % i ftsFile.Size = 10 ftsFile.SourceSE = "CERN-USER" ftsFile.TargetSE = "PIC-USER" ftsFile.SourceSURL = "foo://source.bar.baz/%s" % ftsFile.LFN ftsFile.TargetSURL = "foo://target.bar.baz/%s" % ftsFile.LFN ftsFile.Status = "Waiting" self.ftsFiles.append( ftsFile ) ses = [ "CERN-USER", "RAL-USER" ] statuses = [ "Submitted", "Finished", "FinishedDirty", "Active", "Ready" ] self.submitted = 0 self.ftsJobs = [] for i in range( 7200 ): ftsJob = FTSJob() ftsJob.FTSGUID = str( uuid.uuid4() ) ftsJob.FTSServer = self.ftsSites[0].FTSServer ftsJob.Status = statuses[ i % len( statuses ) ] if ftsJob.Status in FTSJob.FINALSTATES: ftsJob.Completeness = 100 if ftsJob.Status == "Active": ftsJob.Completeness = 90 ftsJob.SourceSE = ses[ i % len( ses ) ] ftsJob.TargetSE = "PIC-USER" ftsFile = FTSFile() ftsFile.FileID = i + 1 ftsFile.OperationID = i + 1 ftsFile.LFN = "/a/b/c/%d" % i ftsFile.Size = 1000000 ftsFile.SourceSE = ftsJob.SourceSE ftsFile.TargetSE = ftsJob.TargetSE ftsFile.SourceSURL = "foo://source.bar.baz/%s" % ftsFile.LFN ftsFile.TargetSURL = "foo://target.bar.baz/%s" % ftsFile.LFN ftsFile.Status = "Waiting" if ftsJob.Status != "FinishedDirty" else "Failed" ftsFile.FTSGUID = ftsJob.FTSGUID if ftsJob.Status == "FinishedDirty": ftsJob.FailedFiles = 1 ftsJob.FailedSize = ftsFile.Size ftsJob.addFile( ftsFile ) self.ftsJobs.append( ftsJob ) self.submitted = len( [ i for i in self.ftsJobs if i.Status == "Submitted" ] )
def setUp(self): """ set up :param self: self reference """ self.log = gLogger.getSubLogger(self.__class__.__name__) if not self.__db: gConfig.setOptionValue("/DIRAC/Setup", "Test") gConfig.setOptionValue("/DIRAC/Setups/Test/DataManagement", "Test") spath = "/Systems/DataManagement/Test/Databases/DataLoggingDB" gConfig.setOptionValue("%s/%s" % (spath, "Host"), "127.0.0.1") gConfig.setOptionValue("%s/%s" % (spath, "DBName"), "AccountingDB") gConfig.setOptionValue("%s/%s" % (spath, "User"), "Dirac") gConfig.setOptionValue("%s/%s" % (spath, "Password"), "Dirac") self.__db = DataLoggingDB()