class MAUITestCase(unittest.TestCase):

    def setUp(self):
        self.workspace = Workspace()

        self.diagPattern = """Displaying group information...

Name                 Priority        Flags         QDef      QOSList*        PartitionList Target  Limits

%(group)s                   0       [NONE]       [NONE]       [NONE]       [NONE]          0.00  %(limit)s
DEFAULT                     0       [NONE]       [NONE]       [NONE]       [NONE]          0.00  [NONE]
"""

    def test_diagnose_limited_ok(self):
        pattern_args = {"group": "dteam", "limit": "MAXJOB=50 MAXPROC=4"}
        tmpfile = self.workspace.createFile(self.diagPattern % pattern_args)

        container = MAUIHandler.parseJobLimit(None, None, tmpfile)
        self.assertTrue("dteam" in container.limitTable
                        and container.limitTable["dteam"] == 50)

    def test_diagnose_unlimited_ok(self):
        pattern_args = {"group": "dteam", "limit": "[NONE]"}
        tmpfile = self.workspace.createFile(self.diagPattern % pattern_args)

        container = MAUIHandler.parseJobLimit(None, None, tmpfile)
        self.assertTrue(len(container.limitTable) == 0)
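# The two tests above assume that MAUIHandler.parseJobLimit() reads the
# "diagnose -g" report and records the MAXJOB value found in the Limits
# column of each group row. The helper below is only an illustrative sketch
# of that extraction, inferred from the assertions; it is not the actual
# MAUIHandler implementation.

import re

def _sketch_parse_group_limits(diag_output):
    # Returns a dict mapping group name -> MAXJOB value, skipping the
    # header and the DEFAULT row, and ignoring groups without MAXJOB.
    limitTable = {}
    for line in diag_output.splitlines():
        tokens = line.split()
        if len(tokens) < 8 or tokens[0] in ('Name', 'DEFAULT'):
            continue
        match = re.search(r'MAXJOB=(\d+)', line)
        if match:
            limitTable[tokens[0]] = int(match.group(1))
    return limitTable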
def test_getMaxJobsTable_nofile(self):
    try:
        workspace = Workspace(vomap=self.vomap)

        cfgfile = workspace.getConfigurationFile()
        config = DynSchedUtils.readConfigurationFromFile(cfgfile)

        result = DynSchedUtils.getMaxJobsTable(config)

    except DynSchedUtils.UtilsException, test_error:
        msg = str(test_error)
        self.assertTrue(msg.startswith("Error running"))
def test_getMaxJobsTable_ok(self):
    workspace = Workspace(vomap=self.vomap)
    workspace.setMaxJobCmd(self.mjTable)

    cfgfile = workspace.getConfigurationFile()
    config = DynSchedUtils.readConfigurationFromFile(cfgfile)

    result = DynSchedUtils.getMaxJobsTable(config)
    self.assertTrue('atlas' in result and result['atlas'] == 50
                    and 'dteam' in result and result['dteam'] == 150
                    and 'infngrid' in result and result['infngrid'] == 360)
def test_getMaxJobsTable_wrongexit(self):
    try:
        workspace = Workspace(vomap=self.vomap)

        script = """#!/bin/bash
exit 1
"""
        workspace.setMaxJobCmd(script)

        cfgfile = workspace.getConfigurationFile()
        config = DynSchedUtils.readConfigurationFromFile(cfgfile)

        result = DynSchedUtils.getMaxJobsTable(config)

    except DynSchedUtils.UtilsException, test_error:
        msg = str(test_error)
        self.assertTrue(msg.startswith("VO max jobs backend command returned"))
class SAcctMgrTestCase(unittest.TestCase):

    def setUp(self):
        self.workspace = Workspace()

    def test_policies_parsing_ok(self):
        tmpbuff = 'alice|alice001|creamtest2|1|20||1-12|1440|2|601|600\n'
        tmpbuff += 'alice|alice001|creamtest1|1|20||1-12|1440|2|602|600\n'
        tmpbuff += 'dteam|dteam001|creamtest1|1|20||12:00:00|2880|2|701|700\n'
        tmpbuff += 'atlas|atlas001||1|20||12:00:00|2880|4|801|800\n'
        tmpfile = self.workspace.createFile(tmpbuff)

        container = parsePolicies(tmpfile)

        try:
            tmpPol = container.policyTable['alice', 'creamtest2']
            result = tmpPol.maxWallTime == 129600 and tmpPol.maxCPUTime == 86400 \
                and tmpPol.maxCPUPerJob == 2

            tmpPol = container.policyTable[None, 'creamtest1']
            result = result and tmpPol.maxWallTime == 129600 \
                and tmpPol.maxCPUTime == 172800 and tmpPol.maxCPUPerJob == 2

            tmpPol = container.policyTable['atlas', None]
            result = result and tmpPol.maxWallTime == 43200 \
                and tmpPol.maxCPUTime == 172800 and tmpPol.maxCPUPerJob == 4
        except:
            etype, evalue, etraceback = sys.exc_info()
            sys.excepthook(etype, evalue, etraceback)
            result = False

        self.assertTrue(result)
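# The wall-time field in the sacctmgr rows above uses SLURM duration syntax
# ('1-12' is 1 day + 12 hours, '12:00:00' is 12 hours) while the CPU limit
# is given in minutes (1440 -> 86400 s, 2880 -> 172800 s). Below is a
# minimal conversion sketch matching the seconds asserted above; the helper
# name and shape are assumptions, not the parsePolicies implementation.

def _slurm_duration_to_seconds(value):
    # Accepts [days-]hours[:minutes[:seconds]] or minutes[:seconds].
    days = 0
    if '-' in value:
        daystr, value = value.split('-', 1)
        days = int(daystr)
        parts = [int(p) for p in value.split(':')] + [0, 0]
        hours, minutes, seconds = parts[0], parts[1], parts[2]
    else:
        parts = [int(p) for p in value.split(':')]
        if len(parts) == 3:
            hours, minutes, seconds = parts
        elif len(parts) == 2:
            hours, minutes, seconds = 0, parts[0], parts[1]
        else:
            hours, minutes, seconds = 0, parts[0], 0
    return ((days * 24 + hours) * 60 + minutes) * 60 + seconds

assert _slurm_duration_to_seconds('1-12') == 129600
assert _slurm_duration_to_seconds('12:00:00') == 43200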
def test_analyze_err_from_script(self):
    try:
        workspace = Workspace(vomap=self.vomap)

        script = """#!/usr/bin/python
import sys
sys.stderr.write("Dummy error message")
sys.exit(1)
"""
        workspace.setLRMSCmd(script)

        cfgfile = workspace.getConfigurationFile()
        config = DynSchedUtils.readConfigurationFromFile(cfgfile)

        collector = Analyzer.analyze(config, {})
        self.fail("Exception not handled")

    except Analyzer.AnalyzeException, test_error:
        msg = str(test_error)
        self.assertTrue(msg.startswith("Dummy error message"))
def test_process_missingce(self):
    try:
        glueceuniqueid = 'GlueCEUniqueID=cream-38.pd.infn.it:8443/cream-pbs-creamtest1,mds-vo-name=resource,o=grid'
        gluevoviewid = 'GlueVOViewLocalID=dteam,' + glueceuniqueid

        ldif = """
dn: %s
GlueVOViewLocalID: dteam
GlueChunkKey: GlueCEUniqueID=cream-38.pd.infn.it:8443/cream-pbs-creamtest1
GlueCEAccessControlBaseRule: VO:dteam
""" % gluevoviewid

        workspace = Workspace(vomap=self.vomap)
        workspace.setLRMSCmd(self._script())
        workspace.setGLUE1StaticFile(ldif)

        cfgfile = workspace.getConfigurationFile()
        config = DynSchedUtils.readConfigurationFromFile(cfgfile)

        dOut = DummyOutput()
        collector = Analyzer.analyze(config, {})
        GLUE1Handler.process(config, collector, dOut)
        self.fail("No exception detected")

    except GLUE1Handler.GLUE1Exception, glue_error:
        msg = str(glue_error)
        self.assertTrue(msg.startswith("Invalid foreign key"))
def test_process_ok(self):
    glueceuniqueid = 'GlueCEUniqueID=cream-38.pd.infn.it:8443/cream-pbs-creamtest1,mds-vo-name=resource,o=grid'
    gluevoviewid = 'GlueVOViewLocalID=dteam,' + glueceuniqueid

    ldif = """
dn: %s
GlueVOViewLocalID: dteam
GlueChunkKey: GlueCEUniqueID=cream-38.pd.infn.it:8443/cream-pbs-creamtest1
GlueCEAccessControlBaseRule: VO:dteam

dn: %s
GlueCEUniqueID: cream-38.pd.infn.it:8443/cream-pbs-creamtest1
GlueCEName: creamtest1
GlueCEAccessControlBaseRule: VO:infngrid
GlueCEAccessControlBaseRule: VO:dteam
""" % (gluevoviewid, glueceuniqueid)

    workspace = Workspace(vomap=self.vomap)
    workspace.setLRMSCmd(self._script())
    workspace.setGLUE1StaticFile(ldif)

    cfgfile = workspace.getConfigurationFile()
    config = DynSchedUtils.readConfigurationFromFile(cfgfile)

    dOut = DummyOutput()
    collector = Analyzer.analyze(config, {})
    GLUE1Handler.process(config, collector, dOut)

    result = dOut.queued[glueceuniqueid] == 2
    result = result and dOut.running[glueceuniqueid] == 4
    result = result and dOut.queued[gluevoviewid] == 1
    result = result and dOut.running[gluevoviewid] == 2
    self.assertTrue(result)
def test_process_ok(self):
    glue2shareid = 'GLUE2ShareID=creamtest1_dteam_abc,GLUE2ServiceID=abc,GLUE2GroupID=resource,o=glue'

    ldif = """
dn: GLUE2PolicyID=creamtest1_dteam_abc_policy,%s
GLUE2PolicyUserDomainForeignKey: dteam
GLUE2MappingPolicyShareForeignKey: creamtest1_dteam_abc

dn: %s
GLUE2ShareID: creamtest1_dteam_abc
GLUE2ComputingShareMappingQueue: creamtest1
""" % (glue2shareid, glue2shareid)

    workspace = Workspace(vomap=self.vomap)
    workspace.setLRMSCmd(self._script())
    workspace.setGLUE2StaticFile(ldif)

    cfgfile = workspace.getConfigurationFile()
    config = DynSchedUtils.readConfigurationFromFile(cfgfile)

    dOut = DummyOutput()
    collector = Analyzer.analyze(config, {})
    GLUE2Handler.process(config, collector, dOut)

    result = dOut.queued[glue2shareid] == 1
    result = result and dOut.running[glue2shareid] == 2
    self.assertTrue(result)
def test_process_missing_vo_in_policy(self):
    try:
        glue2shareid = 'GLUE2ShareID=creamtest1_dteam_abc,GLUE2ServiceID=abc,GLUE2GroupID=resource,o=glue'

        ldif = """
dn: %s
GLUE2ShareID: creamtest1_dteam_abc
GLUE2ComputingShareMappingQueue: creamtest1

dn: GLUE2PolicyID=creamtest1_dteam_abc_policy,%s
GLUE2MappingPolicyShareForeignKey: creamtest1_dteam_abc
""" % (glue2shareid, glue2shareid)

        workspace = Workspace(vomap=self.vomap)
        workspace.setLRMSCmd(self._script())
        workspace.setGLUE2StaticFile(ldif)

        cfgfile = workspace.getConfigurationFile()
        config = DynSchedUtils.readConfigurationFromFile(cfgfile)

        collector = Analyzer.analyze(config, {})
        GLUE2Handler.process(config, collector, DummyOutput())
        self.fail("No exception detected")

    except GLUE2Handler.GLUE2Exception, glue_error:
        msg = str(glue_error)
        self.assertTrue(
            msg == "Missing mandatory attribute GLUE2PolicyUserDomainForeignKey")
def test_analyze_with_maxjobforvo(self):
    jTable = [
        ('atlasprod', 'creamtest1', 'running', 1327564866, 'creXX_23081970'),
        ('dteamgold', 'creamtest2', 'running', 1327566866, 'creXX_23081972'),
        ('dteamgold', 'creamtest1', 'running', 1327567866, 'creXX_23081973'),
        ('infngridlow', 'creamtest1', 'running', 1327569866, 'creXX_23081975'),
        ('infngridlow', 'creamtest2', 'running', 1327570866, 'creXX_23081976'),
        ('infngridhigh', 'creamtest2', 'running', 1327572866, 'creXX_23081978')
    ]

    workspace = Workspace(vomap=self.vomap)

    script = self.headerfmt % (10, 4, 1327574866, 26)
    for jItem in jTable:
        script += self.dictfmt % jItem
    script += self.footerfmt

    mJobTable = {'dteam': 5, 'atlas': 5, 'infngrid': 5}

    workspace.setLRMSCmd(script)
    workspace.setMaxJobCmd(mJobTable)

    cfgfile = workspace.getConfigurationFile()
    config = DynSchedUtils.readConfigurationFromFile(cfgfile)

    collector = Analyzer.analyze(config, mJobTable)
    self.assertTrue(collector.freeSlots(None, 'dteam') == 3)
def test_analyze_ok(self):
    jTable = [
        ('atlasprod', 'creamtest1', 'running', 1327564866, 'creXX_23081970'),
        ('atlasprod', 'creamtest2', 'queued', 1327565866, 'creXX_23081971'),
        ('dteamgold', 'creamtest2', 'running', 1327566866, 'creXX_23081972'),
        ('dteamgold', 'creamtest1', 'running', 1327567866, 'creXX_23081973'),
        ('dteamgold', 'creamtest2', 'queued', 1327568866, 'creXX_23081974'),
        ('infngridlow', 'creamtest1', 'running', 1327569866, 'creXX_23081975'),
        ('infngridlow', 'creamtest2', 'running', 1327570866, 'creXX_23081976'),
        ('infngridhigh', 'creamtest1', 'running', 1327571866, 'creXX_23081977'),
        ('infngridhigh', 'creamtest2', 'running', 1327572866, 'creXX_23081978'),
        ('infngridhigh', 'creamtest1', 'queued', 1327573866, 'creXX_23081979')
    ]

    workspace = Workspace(vomap=self.vomap)

    script = self.headerfmt % (5, 0, 1327574866, 26)
    for jItem in jTable:
        script += self.dictfmt % jItem
    script += self.footerfmt

    workspace.setLRMSCmd(script)

    cfgfile = workspace.getConfigurationFile()
    config = DynSchedUtils.readConfigurationFromFile(cfgfile)

    collector = Analyzer.analyze(config, {})

    result = collector.runningJobsForVO('atlas') == 1
    result = result and collector.queuedJobsForVO('atlas') == 1
    result = result and collector.runningJobsForVO('dteam') == 2
    result = result and collector.queuedJobsForVO('dteam') == 1
    result = result and collector.runningJobsForVO('infngrid') == 4
    result = result and collector.queuedJobsForVO('infngrid') == 1
    self.assertTrue(result)
def test_process_missing_share(self):
    try:
        ldif = """
dn: GLUE2PolicyID=creamtest1_dteam_abc_policy,GLUE2ShareID=creamtest1_dteam_abc,GLUE2ServiceID=abc,GLUE2GroupID=resource,o=glue
GLUE2PolicyUserDomainForeignKey: dteam
GLUE2MappingPolicyShareForeignKey: creamtest1_dteam_abc
"""

        workspace = Workspace(vomap=self.vomap)
        workspace.setLRMSCmd(self._script())
        workspace.setGLUE2StaticFile(ldif)

        cfgfile = workspace.getConfigurationFile()
        config = DynSchedUtils.readConfigurationFromFile(cfgfile)

        collector = Analyzer.analyze(config, {})
        GLUE2Handler.process(config, collector, DummyOutput())
        self.fail("No exception detected")

    except GLUE2Handler.GLUE2Exception, glue_error:
        msg = str(glue_error)
        self.assertTrue(msg.startswith("Invalid foreign key"))
class QStatTestCase(unittest.TestCase):

    def setUp(self):
        self.workspace = Workspace()

        self.srvPattern = '''# qstat -B -f
Server: cert-34.pd.infn.it
    server_state = Active
    scheduling = True
    total_jobs = 0
    state_count = Transit:0 Queued:3 Held:0 Waiting:-3 Running:0 Exiting:0
    acl_host_enable = False
    acl_hosts = cert-34.pd.infn.it
    managers = [email protected]
    operators = [email protected]
    default_queue = dteam
    log_events = 511
    mail_from = adm
    query_other_jobs = True
    resources_assigned.nodect = 0
    scheduler_iteration = 600
    node_check_rate = 150
    tcp_timeout = 6
    default_node = lcgpro
    node_pack = False
    mail_domain = never
    pbs_version = %(lrmsver)s
    kill_delay = 10
    next_job_number = 11
    net_counter = 3 0 0
    authorized_users = *@cert-34.pd.infn.it
'''

        self.jobPattern = '''# item from qstat -f
Job Id: %(jserial)s.cert-34.pd.infn.it
    Job_Name = %(jname)s
    Job_Owner = [email protected]
    job_state = %(jstate)s
    queue = %(queue)s
    euser = dteam013
    egroup = dteam
    qtime = %(qtime)s
    Resource_List.walltime = 36:00:00
    %(pair1)s
    server = cert-34.pd.infn.it
    Checkpoint = u
    ctime = Wed Aug 21 11:37:25 2013
    Error_Path = cert-34.pd.infn.it:/dev/null
    Hold_Types = n
    Join_Path = n
    Keep_Files = n
    Mail_Points = n
    mtime = Wed Aug 21 11:37:25 2013
    Output_Path = cert-34.pd.infn.it:/dev/null
    Priority = 0
    Rerunable = True
    Resource_List.neednodes = 1
    Resource_List.nodect = 1
    Resource_List.nodes = 1
    Shell_Path_List = /bin/bash
    stagein = CREAM921657923_jobWrapper.sh.18190.15697.1377077844@cert-34.pd.infn.it
    stageout = [email protected]
    substate = 11
    Variable_List = PBS_O_QUEUE=cert,PBS_O_HOME=/home/dteam013,
        PBS_O_LANG=en_US.UTF-8,PBS_O_LOGNAME=dteam013,
        PBS_O_PATH=/usr/kerberos/bin:/bin:/usr/bin:/home/dteam013/bin,
        PBS_O_MAIL=/var/spool/mail/dteam013,PBS_O_SHELL=/bin/sh,
        PBS_O_HOST=cert-34.pd.infn.it,PBS_SERVER=cert-34.pd.infn.it,
        PBS_O_WORKDIR=/var/tmp
    queue_rank = 23
    queue_type = E
    etime = Wed Aug 21 11:37:25 2013
    submit_args = /tmp/cream_921657923
    fault_tolerant = False
    submit_host = cert-34.pd.infn.it
    init_work_dir = /var/tmp

'''

        self.queuePattern = '''# item from qstat -Q -f
Queue: %(queue)s
    queue_type = Execution
    total_jobs = 0
    state_count = Transit:0 Queued:0 Held:0 Waiting:0 Running:0 Exiting:0
    resources_max.cput = %(maxcpu)s
    resources_max.walltime = %(maxwt)s
    acl_group_enable = True
    acl_groups = dteam,infngrid,testers
    mtime = 1375189536
    resources_assigned.nodect = 0
    enabled = True
    started = True
'''

    def test_lrmsver_ok(self):
        pattern = self.srvPattern % {'lrmsver': '2.5.7'}
        tmpfile = self.workspace.createFile(pattern)

        self.assertTrue(QStatHandler.parseLRMSVersion(None, tmpfile) == '2.5.7')

    def test_lrmsver_missing(self):
        pattern = self.srvPattern.replace('pbs_version', 'no_version')
        tmpfile = self.workspace.createFile(pattern)

        self.assertTrue(QStatHandler.parseLRMSVersion(None, tmpfile) == None)

    def test_parse_job_qtime_ok(self):
        pattern_args = {'jserial': '01',
                        'jname': 'cream_921657923',
                        'jstate': 'Q',
                        'qtime': 'Wed Aug 21 11:37:25 2013',
                        'queue': 'cert',
                        'pair1': 'dummy1 = None'}
        tmpfile = self.workspace.createFile(self.jobPattern % pattern_args)

        pattern_args = {'jserial': '02',
                        'jname': 'cream_921657924',
                        'jstate': 'Q',
                        'qtime': 'Wed Aug 21 11:37:30 2013',
                        'queue': 'cert',
                        'pair1': 'dummy1 = None'}
        self.workspace.appendToFile(self.jobPattern % pattern_args, tmpfile)

        outList = list()
        QStatHandler.parse(outList, None, tmpfile)

        qtimeCount = 0
        for jtable in outList:
            if jtable['qtime'] == 1377074245 or jtable['qtime'] == 1377074250:
                qtimeCount += 1
        self.assertTrue(qtimeCount == 2)

    def test_parse_job_stime_ok(self):
        pattern_args = {'jserial': '01',
                        'jname': 'cream_921657923',
                        'jstate': 'R',
                        'qtime': 'Wed Aug 21 11:37:25 2013',
                        'queue': 'cert',
                        'pair1': 'start_time = Wed Aug 21 11:37:26 2013'}
        tmpfile = self.workspace.createFile(self.jobPattern % pattern_args)

        pattern_args = {'jserial': '02',
                        'jname': 'cream_921657924',
                        'jstate': 'Q',
                        'qtime': 'Wed Aug 21 11:37:30 2013',
                        'queue': 'cert',
                        'pair1': 'dummy1 = None'}
        self.workspace.appendToFile(self.jobPattern % pattern_args, tmpfile)

        outList = list()
        QStatHandler.parse(outList, None, tmpfile)

        stimeCount = 0
        for jtable in outList:
            try:
                if jtable['start'] == 1377074246 and jtable['startAnchor'] == 'start_time':
                    stimeCount += 1
            except:
                pass
        self.assertTrue(stimeCount == 1)

    def test_parse_queue_ok(self):
        pattern_args = {'queue': 'cert', 'maxcpu': '24:00:00', 'maxwt': '36:00:00'}
        tmpfile = self.workspace.createFile(self.queuePattern % pattern_args)

        container = QStatHandler.parseQueueInfo('cert', None, tmpfile)
        result = container.maxCPUtime == 86400
        result = result and container.maxWallTime == 129600
        self.assertTrue(result)
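# The epoch values checked above (1377074245, 1377074246, 1377074250) are
# local-time conversions of the ctime-style stamps in the fixture, so they
# depend on the timezone the tests were written in. A plausible sketch of
# the two conversions the parser is expected to perform (illustrative only,
# not the QStatHandler code):

import time

def _pbs_timestamp_to_epoch(value):
    # e.g. 'Wed Aug 21 11:37:25 2013' -> seconds since the epoch (local time)
    return int(time.mktime(time.strptime(value, '%a %b %d %H:%M:%S %Y')))

def _pbs_hms_to_seconds(value):
    # e.g. '24:00:00' -> 86400, '36:00:00' -> 129600
    hours, minutes, seconds = [int(p) for p in value.split(':')]
    return hours * 3600 + minutes * 60 + seconds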
class SInfoTestCase(unittest.TestCase):

    def setUp(self):
        self.workspace = Workspace()

        self.partPattern = "%(partid)s %(state)s %(cpuinfo)s %(maxcput)s %(defcput)s"
        self.partPattern += " %(jsize)s %(nodes)s %(maxcpun)s %(sct)s\n"

    def test_partition_ok(self):
        pattern_args = {'partid': 'creamtest1',
                        'state': 'up',
                        'cpuinfo': '0/2/0/2',
                        'maxcput': '30:00',
                        'defcput': 'n/a',
                        'jsize': '1-infinite',
                        'nodes': '0/1/0/1',
                        'maxcpun': 'UNLIMITED',
                        'sct': '2:1:1'}
        tmpfile = self.workspace.createFile(self.partPattern % pattern_args)

        pattern_args['partid'] = 'creamtest2*'
        pattern_args['defcput'] = '15:00'
        self.workspace.appendToFile(self.partPattern % pattern_args, tmpfile)

        config = DummyConfig()
        container = SInfoHandler.parsePartInfo(tmpfile)

        result = container['creamtest1'].maxRuntime == 1800
        result = result and container['creamtest1'].defaultRuntime == 1800
        result = result and container['creamtest1'].state == 'Production'
        result = result and container['creamtest2'].maxRuntime == 1800
        result = result and container['creamtest2'].defaultRuntime == 900
        result = result and container['creamtest2'].state == 'Production'
        self.assertTrue(result)

    def test_partition_one_closed(self):
        pattern_args = {'partid': 'creamtest1',
                        'state': 'down',
                        'cpuinfo': '0/2/0/2',
                        'maxcput': '30:00',
                        'defcput': 'n/a',
                        'jsize': '1-infinite',
                        'nodes': '0/1/0/1',
                        'maxcpun': 'UNLIMITED',
                        'sct': '2:1:1'}
        tmpfile = self.workspace.createFile(self.partPattern % pattern_args)

        pattern_args['partid'] = 'creamtest2*'
        pattern_args['state'] = 'up'
        self.workspace.appendToFile(self.partPattern % pattern_args, tmpfile)
        self.workspace.appendToFile(self.partPattern % pattern_args, tmpfile)

        config = DummyConfig()
        container = SInfoHandler.parsePartInfo(tmpfile)

        result = container['creamtest1'].state == 'Closed'
        result = result and container['creamtest2'].state == 'Production'
        self.assertTrue(result)

    def test_partition_cpucount_ok(self):
        pattern_args = {'partid': 'creamtest1',
                        'state': 'up',
                        'cpuinfo': '2/2/0/4',
                        'maxcput': '30:00',
                        'defcput': 'n/a',
                        'jsize': '1-infinite',
                        'nodes': '0/1/0/1',
                        'maxcpun': 'UNLIMITED',
                        'sct': '2:1:1'}
        tmpfile = self.workspace.createFile(self.partPattern % pattern_args)

        config = DummyConfig()
        container = SInfoHandler.parsePartInfo(tmpfile)

        result = container['creamtest1'].freeCPU == 2
        result = result and container['creamtest1'].totalCPU == 4
        self.assertTrue(result)

    def test_maxslot_node_ok(self):
        pattern_args = {'partid': 'creamtest1',
                        'state': 'up',
                        'cpuinfo': '2/2/0/4',
                        'maxcput': '30:00',
                        'defcput': 'n/a',
                        'jsize': '1-10',
                        'nodes': '0/1/0/1',
                        'maxcpun': 'UNLIMITED',
                        'sct': '2:1:1'}
        tmpfile = self.workspace.createFile(self.partPattern % pattern_args)

        pattern_args['partid'] = 'creamtest2'
        pattern_args['jsize'] = '2'
        self.workspace.appendToFile(self.partPattern % pattern_args, tmpfile)

        config = DummyConfig()
        container = SInfoHandler.parsePartInfo(tmpfile)

        result = container['creamtest1'].slotsPerJob == 20
        result = result and container['creamtest2'].slotsPerJob == 4
        self.assertTrue(result)

    def test_maxslot_cr_cpu_ok(self):
        pattern_args = {'partid': 'creamtest1',
                        'state': 'up',
                        'cpuinfo': '2/2/0/4',
                        'maxcput': '30:00',
                        'defcput': 'n/a',
                        'jsize': '1-10',
                        'nodes': '0/1/0/1',
                        'maxcpun': 'UNLIMITED',
                        'sct': '2:2:1'}
        tmpfile = self.workspace.createFile(self.partPattern % pattern_args)

        pattern_args['partid'] = 'creamtest2'
        pattern_args['jsize'] = '2'
        self.workspace.appendToFile(self.partPattern % pattern_args, tmpfile)

        config = DummyConfig()
        config.slotType = 'CPU'
        container = SInfoHandler.parsePartInfo(tmpfile)

        result = container['creamtest1'].slotsPerJob == 40
        result = result and container['creamtest2'].slotsPerJob == 8
        self.assertTrue(result)

    def test_maxslot_undef_ok(self):
        pattern_args = {'partid': 'creamtest1',
                        'state': 'up',
                        'cpuinfo': '2/2/0/4',
                        'maxcput': '30:00',
                        'defcput': 'n/a',
                        'jsize': '1-unlimited',
                        'nodes': '0/1/0/1',
                        'maxcpun': 'UNLIMITED',
                        'sct': '2:2:1'}
        tmpfile = self.workspace.createFile(self.partPattern % pattern_args)

        pattern_args['partid'] = 'creamtest2'
        pattern_args['jsize'] = '2'
        self.workspace.appendToFile(self.partPattern % pattern_args, tmpfile)

        config = DummyConfig()
        config.slotType = 'CPU'
        container = SInfoHandler.parsePartInfo(tmpfile)

        result = container['creamtest1'].slotsPerJob == -1
        result = result and container['creamtest2'].slotsPerJob == 8
        self.assertTrue(result)
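# The slotsPerJob values asserted above are consistent with multiplying the
# upper bound of the JOB_SIZE column by the CPUs per node implied by the
# S:C:T column (2 for '2:1:1', 4 for '2:2:1' when the configured slot type
# is 'CPU'), with -1 standing for an unbounded job size. The helper below
# is a worked sketch of that arithmetic inferred from the assertions, not
# the SInfoHandler logic itself.

def _slots_per_job(job_size_field, cpus_per_node):
    # '1-10' -> 10 * cpus_per_node, '2' -> 2 * cpus_per_node,
    # '1-unlimited' / '1-infinite' -> -1 (no upper bound)
    upper = job_size_field.split('-')[-1]
    if not upper.isdigit():
        return -1
    return int(upper) * cpus_per_node

assert _slots_per_job('1-10', 2) == 20
assert _slots_per_job('2', 4) == 8
assert _slots_per_job('1-unlimited', 4) == -1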
class PBSNodesTestCase(unittest.TestCase):

    def setUp(self):
        self.workspace = Workspace()

        self.pbsnodesPattern = '''# item from pbsnodes -a
%(host)s.pn.pd.infn.it
     state = %(state)s
     np = %(np)s
     properties = lcgpro
     ntype = cluster
     status = rectime=1376989178,varattr=,jobs=,state=free,netload=23614350258,gres=,loadave=0.00
     gpus = 0
'''

    def test_parse_all_free(self):
        pattern_args = {'host': 'cert-wn64-01', 'state': 'free', 'np': '2'}
        tmpfile = self.workspace.createFile(self.pbsnodesPattern % pattern_args)

        pattern_args = {'host': 'cert-wn64-02', 'state': 'free', 'np': '2'}
        self.workspace.appendToFile(self.pbsnodesPattern % pattern_args, tmpfile)

        container = PBSNodesHandler.parseCPUInfo(None, tmpfile)
        self.assertTrue(container.totalCPU == 4 and container.freeCPU == 4)

    def test_parse_all_free_with_job(self):
        pattern_args = {'host': 'cert-wn64-01', 'state': 'free', 'np': '2'}
        tmpfile = self.workspace.createFile(self.pbsnodesPattern % pattern_args)

        pattern_args = {'host': 'cert-wn64-02', 'state': 'free', 'np': '4'}
        tmps = self.pbsnodesPattern % pattern_args
        tmps += ' jobs = 0/15.cert-34.pd.infn.it, 1/16.cert-34.pd.infn.it\n'
        self.workspace.appendToFile(tmps, tmpfile)

        container = PBSNodesHandler.parseCPUInfo(None, tmpfile)
        self.assertTrue(container.totalCPU == 6 and container.freeCPU == 4)

    def test_parse_half_busy(self):
        pattern_args = {'host': 'cert-wn64-01', 'state': 'busy', 'np': '2'}
        tmpfile = self.workspace.createFile(self.pbsnodesPattern % pattern_args)

        pattern_args = {'host': 'cert-wn64-02', 'state': 'free', 'np': '2'}
        self.workspace.appendToFile(self.pbsnodesPattern % pattern_args, tmpfile)

        container = PBSNodesHandler.parseCPUInfo(None, tmpfile)
        self.assertTrue(container.totalCPU == 4 and container.freeCPU == 2)

    def test_parse_multi_state_down(self):
        pattern_args = {'host': 'cert-wn64-01', 'state': 'offline,down', 'np': '2'}
        tmpfile = self.workspace.createFile(self.pbsnodesPattern % pattern_args)

        pattern_args = {'host': 'cert-wn64-02', 'state': 'free', 'np': '2'}
        self.workspace.appendToFile(self.pbsnodesPattern % pattern_args, tmpfile)

        container = PBSNodesHandler.parseCPUInfo(None, tmpfile)
        self.assertTrue(container.totalCPU == 2 and container.freeCPU == 2)

    def test_parse_wrong_cpunum(self):
        try:
            pattern_args = {'host': 'cert-wn64-01', 'state': 'free', 'np': '2a'}
            tmpfile = self.workspace.createFile(self.pbsnodesPattern % pattern_args)

            pattern_args = {'host': 'cert-wn64-02', 'state': 'free', 'np': '2'}
            self.workspace.appendToFile(self.pbsnodesPattern % pattern_args, tmpfile)

            container = PBSNodesHandler.parseCPUInfo(None, tmpfile)
            self.fail("No exception detected")
        except Exception, ex:
            msg = str(ex)
            self.assertTrue(msg.startswith("invalid literal for int"))
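# The CPU totals asserted above follow a simple accounting rule: a node
# contributes its np CPUs unless its state list contains 'down' or
# 'offline'; its free CPUs are np minus the number of entries in the
# "jobs" attribute, and zero when the node is 'busy'. The function below
# is a sketch of that rule inferred from the assertions, not the actual
# PBSNodesHandler.parseCPUInfo implementation.

def _count_pbsnodes_cpus(records):
    # records: list of dicts like {'state': 'free', 'np': 4, 'jobs': 2}
    total = free = 0
    for node in records:
        states = node['state'].split(',')
        if 'down' in states or 'offline' in states:
            continue
        total += node['np']
        if 'busy' not in states:
            free += node['np'] - node.get('jobs', 0)
    return total, free

# e.g. the "half busy" and "multi state down" cases above:
assert _count_pbsnodes_cpus([{'state': 'busy', 'np': 2},
                             {'state': 'free', 'np': 2}]) == (4, 2)
assert _count_pbsnodes_cpus([{'state': 'offline,down', 'np': 2},
                             {'state': 'free', 'np': 2}]) == (2, 2)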
class SControlTestCase(unittest.TestCase):

    def setUp(self):
        self.workspace = Workspace()

        self.nodePattern = '''NodeName=%(nname)s Arch=x86_64 CoresPerSocket=1 CPUAlloc=%(calloc)d CPUErr=0 CPUTot=%(ctot)d CPULoad=0.00 Features=(null)
   Gres=(null)
   NodeAddr=%(nname)s NodeHostName=%(nname)s
   OS=Linux RealMemory=1 AllocMem=0 Sockets=2 Boards=1
   State=%(nstate)s ThreadsPerCore=1 TmpDisk=0 Weight=1
   BootTime=2013-08-23T09:49:03 SlurmdStartTime=2013-08-23T10:04:46
   CurrentWatts=0 LowestJoules=0 ConsumedJoules=0
   ExtSensorsJoules=n/s ExtSensorsWatts=0 ExtSensorsTemp=n/s

'''

        self.jobPattern = '''JobId=%(jid)s Name=%(jname)s
   UserId=%(uid)s(0) GroupId=%(gid)s(0)
   Priority=4294901756 Account=(null) QOS=(null)
   JobState=%(jstate)s Reason=None Dependency=(null)
   Requeue=1 Restarts=0 BatchFlag=1 ExitCode=0:0
   RunTime=00:01:00 TimeLimit=%(tlimit)s TimeMin=N/A
   SubmitTime=%(subtime)s EligibleTime=2013-08-26T11:54:52
   StartTime=%(sttime)s EndTime=2013-08-26T11:55:52
   PreemptTime=None SuspendTime=None SecsPreSuspend=0
   Partition=%(pname)s AllocNode:Sid=cream-04:2682
   ReqNodeList=(null) ExcNodeList=(null)
   NodeList=cream-42
   BatchHost=cream-42
   NumNodes=1 NumCPUs=%(ncpu)d CPUs/Task=1 ReqS:C:T=*:*:*
   MinCPUsNode=1 MinMemoryNode=0 MinTmpDiskNode=0
   Features=(null) Gres=(null) Reservation=(null)
   Shared=0 Contiguous=0 Licenses=(null) Network=(null)
   Command=/root/test.sh
   WorkDir=/root

'''

        self.configPattern = '''Configuration data as of 2013-08-28T10:34:42
SelectType              = %(seltype)s
SelectTypeParameters    = %(selpar)s
SLURM_VERSION           = %(version)s
'''

    def test_scontrol_all_free(self):
        pattern_args = {'nname': 'cream-34', 'nstate': 'IDLE', 'ctot': 2, 'calloc': 0}
        tmpfile = self.workspace.createFile(self.nodePattern % pattern_args)

        pattern_args['nname'] = 'cream-42'
        self.workspace.appendToFile(self.nodePattern % pattern_args, tmpfile)

        pattern_args['nname'] = 'cream-46'
        self.workspace.appendToFile(self.nodePattern % pattern_args, tmpfile)

        ncpu, freecpu = SControlInfoHandler.parseCPUInfo(tmpfile)
        self.assertTrue(ncpu == 6 and freecpu == 6)

    def test_scontrol_part_alloc(self):
        pattern_args = {'nname': 'cream-34', 'nstate': 'ALLOCATED+', 'ctot': 4, 'calloc': 4}
        tmpfile = self.workspace.createFile(self.nodePattern % pattern_args)

        pattern_args = {'nname': 'cream-42', 'nstate': 'IDLE', 'ctot': 4, 'calloc': 0}
        self.workspace.appendToFile(self.nodePattern % pattern_args, tmpfile)

        pattern_args = {'nname': 'cream-46', 'nstate': 'DOWN*', 'ctot': 4, 'calloc': 0}
        self.workspace.appendToFile(self.nodePattern % pattern_args, tmpfile)

        ncpu, freecpu = SControlInfoHandler.parseCPUInfo(tmpfile)
        self.assertTrue(ncpu == 8 and freecpu == 4)

    def test_scontrol_config_ok(self):
        pattern_args = {'version': '2.6.0',
                        'seltype': 'select/cons_res',
                        'selpar': 'CR_CPU'}
        tmpfile = self.workspace.createFile(self.configPattern % pattern_args)

        container = SControlInfoHandler.parseConfiguration(tmpfile)
        result = container.selectType == 'select/cons_res'
        result = result and container.selectParams == 'CR_CPU'
        result = result and container.version == '2.6.0'
        self.assertTrue(result)
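# The scontrol CPU figures above follow the same kind of accounting: nodes
# whose State contains DOWN are skipped, the remaining nodes add CPUTot to
# the total and CPUTot - CPUAlloc to the free count. A worked sketch of
# that arithmetic, inferred from the assertions (not the actual
# SControlInfoHandler.parseCPUInfo implementation):

def _count_scontrol_cpus(nodes):
    # nodes: list of dicts like {'State': 'IDLE', 'CPUTot': 4, 'CPUAlloc': 0}
    ncpu = freecpu = 0
    for node in nodes:
        if 'DOWN' in node['State']:
            continue
        ncpu += node['CPUTot']
        freecpu += node['CPUTot'] - node['CPUAlloc']
    return ncpu, freecpu

# e.g. the "part alloc" case above:
assert _count_scontrol_cpus([
    {'State': 'ALLOCATED+', 'CPUTot': 4, 'CPUAlloc': 4},
    {'State': 'IDLE', 'CPUTot': 4, 'CPUAlloc': 0},
    {'State': 'DOWN*', 'CPUTot': 4, 'CPUAlloc': 0},
]) == (8, 4)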