    def test_JobListInit(self):
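        # a JobList must reject objects that are not Job instances, look up
        # stored jobs by name and refuse to add the same job name twice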
        jl = JobList()

        with self.assertRaises(AssertionError,
                               msg="Invalid object added to job list"):
            jl.add('job1')

        jName1 = 'job1'
        j1 = Job(jName1, JobExecution('/bin/date'), JobResources(numCores=2))
        jl.add(j1)
        self.assertIsNotNone(jl.get(jName1))
        self.assertTrue(jl.exist(jName1))

        with self.assertRaises(JobAlreadyExist,
                               msg='Non-unique job added to job list'):
            jl.add(j1)

        self.assertIsNotNone(jl.get(jName1))
        self.assertTrue(jl.exist(jName1))

        jName2 = 'job2'
        jl.add(Job(jName2, JobExecution('/bin/date'),
                   JobResources(numCores=1)))
        self.assertIsNotNone(jl.get(jName2))
        self.assertTrue(jl.exist(jName2))
    def test_JobExecutionImportFromJSONComplex(self):
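        # build a complete JSON execution description and check that every
        # field (exec, args, env, stdout, stderr, wd) survives the import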
        exec = '/bin/date'
        stdout = "path/to/stdout"
        stderr = "path/to/stderr"
        wd = "path/to/wd"

        je_json = """{
		  "exec": "%s",
		  "args": [ "arg1", "arg2" ],
		  "env": { "var1": "val1", "var2": "val2" },
		  "stdout": "%s",
		  "stderr": "%s",
		  "wd": "%s"
		}""" % (exec, stdout, stderr, wd)

        je = JobExecution(**json.loads(je_json))

        self.assertEqual(exec, je.exec)
        self.assertEqual(je.stdout, stdout)
        self.assertEqual(je.stderr, stderr)
        self.assertEqual(je.wd, wd)
        self.assertIsInstance(je.args, list)
        self.assertEqual(len(je.args), 2)
        for arg in je.args:
            self.assertIsInstance(arg, str)
            self.assertIsNotNone(arg)
        self.assertIsInstance(je.env, dict)
        for env_n, env_v in je.env.items():
            self.assertIsInstance(env_n, str)
            self.assertIsInstance(env_v, str)
            self.assertIsNotNone(env_n)
            self.assertIsNotNone(env_v)
    def test_JobVars(self):
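        # the job description below uses the ${root_wd}, ${jname}, ${nnodes}
        # and ${ncores} variables; after execution the working directory and
        # the stdout/stderr file names should contain the substituted values
        # and the captured environment should include the QCG_PM_* variables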
        res = self.createLocalResources()
        manager = Manager(res, self.config)

        scriptFile = join(self.testSandbox, 'script.sh')
        if exists(scriptFile):
            os.remove(scriptFile)

        with open(scriptFile, 'w') as f:
            f.write('''#!/usr/bin/env bash

echo "*** environment ***"
env
''')
        os.chmod(scriptFile, stat.S_IXUSR | stat.S_IRUSR | stat.S_IWUSR)

        jobs = [
            Job(
                'job1',
                JobExecution(
                    'bash',
                    args=[scriptFile],
                    wd='${root_wd}/job1',
                    stdout='${nnodes}.${ncores}-${jname}.stdout',
                    stderr='${nnodes}.${ncores}-${jname}.stderr',
                ),
                JobResources(numNodes=ResourceSize(1),
                             numCores=ResourceSize(2)))
        ]

        startTime = datetime.datetime.now()

        asyncio.get_event_loop().run_until_complete(
            asyncio.gather(self.__schedule(jobs, manager)))
        asyncio.get_event_loop().close()

        duration = datetime.datetime.now() - startTime
        self.assertTrue(0 < duration.total_seconds() < 4)

        job1_wd = join(self.testSandbox, 'job1')
        self.assertTrue(os.path.exists(job1_wd))
        self.assertTrue(
            os.path.exists(os.path.join(job1_wd, '1.2-job1.stdout')))
        self.assertTrue(
            os.path.exists(os.path.join(job1_wd, '1.2-job1.stderr')))

        stderrStat = os.stat(os.path.join(job1_wd, '1.2-job1.stderr'))
        self.assertEqual(stderrStat.st_size, 0)

        with open(os.path.join(job1_wd, '1.2-job1.stdout'), 'r', 1) as file, \
             mmap.mmap(file.fileno(), 0, prot=mmap.PROT_READ) as s:
            self.assertTrue(s.find('QCG_PM_NTASKS=2'.encode('UTF-8')) != -1)
            self.assertTrue(
                s.find('QCG_PM_TASKS_PER_NODE=2'.encode('UTF-8')) != -1)
            self.assertTrue(s.find('QCG_PM_NNODES=1'.encode('UTF-8')) != -1)
            self.assertTrue(s.find('QCG_PM_NPROCS=2'.encode('UTF-8')) != -1)
    def __setupJobs(self):
        self.jobs = [
            Job(
                'msleep1',
                JobExecution(
                    '/usr/bin/sleep',
                    args=['5s'],
                    wd=abspath(join(self.testSandbox, 'sleep1.sandbox')),
                    stdout='sleep1.stdout',
                    stderr='sleep1.stderr',
                ), JobResources(numCores=ResourceSize(2))),
            Job(
                'msleep2',
                JobExecution(
                    '/usr/bin/sleep',
                    args=['5s'],
                    wd=abspath(join(self.testSandbox, 'sleep2.sandbox')),
                    stdout='sleep2.stdout',
                    stderr='sleep2.stderr',
                ), JobResources(numCores=ResourceSize(2))),
            Job(
                'mscript',
                JobExecution(
                    '/usr/bin/bash',
                    args=[self.scriptFile],
                    wd=abspath(join(self.testSandbox, 'script.sandbox')),
                    stdout='script.stdout',
                    stderr='script.stderr',
                ), JobResources(numCores=ResourceSize(2))),
            Job(
                'msleep3',
                JobExecution(
                    '/usr/bin/sleep',
                    args=['5s'],
                    wd=abspath(join(self.testSandbox, 'sleep3.sandbox')),
                    stdout='sleep3.stdout',
                    stderr='sleep3.stderr',
                ), JobResources(numCores=ResourceSize(1)))
        ]
    def test_JobExecutionImportExportEquality(self):
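        # exporting a JobExecution to JSON and importing it back should
        # produce an identical JSON document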
        je = JobExecution('/bin/hostname')

        je_json = je.toJSON()
        je_copy = JobExecution(**json.loads(je_json))
        je_json2 = je_copy.toJSON()

        self.assertEqual(je_json, je_json2)
    def test_JobExecutionImportFromJSONDefault(self):
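        # only 'exec' is given, so the remaining fields should take their
        # defaults: None for stdout/stderr/wd, a list for args, a dict for env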
        exec = '/bin/hostname'

        je_json = """{
		  "exec": "%s"
		}""" % (exec)

        je = JobExecution(**json.loads(je_json))

        self.assertEqual(exec, je.exec)
        self.assertIsNone(je.stdout)
        self.assertIsNone(je.stderr)
        self.assertIsNone(je.wd)
        self.assertIsInstance(je.args, list)
        self.assertIsInstance(je.env, dict)
    def test_JobInitAndJson(self):
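        # a freshly created job starts in the QUEUED state with a single
        # history entry; every state change appends to the history, and a
        # JSON round trip should reproduce the same job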
        jName = 'job1'
        j = Job(jName, JobExecution('/bin/date'), JobResources(numCores=2))

        self.assertIsNotNone(j)
        self.assertIsNotNone(j.execution)
        self.assertEqual(j.name, jName)
        self.assertEqual(j.state, JobState.QUEUED)
        self.assertEqual(len(j.history), 1)
        self.assertFalse(j.hasDependencies())

        j.state = JobState.EXECUTING
        self.assertEqual(j.state, JobState.EXECUTING)
        self.assertEqual(len(j.history), 2)

        j_json = j.toJSON()
        j_copy = Job(**json.loads(j_json))
        self.assertIsNotNone(j_copy)
        j_json2 = j_copy.toJSON()

        self.assertEqual(j_json, j_json2)

        self.compareJobs(j, j_copy)
    def __init__(self, executor, schema, allocation, job):
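        # keep per-job execution state: the target allocation, the execution
        # schema, handles for the process task and its stdio files, the exit
        # code and error message, plus a copy of the parent environment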
        assert allocation is not None
        assert job is not None
        assert schema is not None

        self.allocation = allocation
        self.job = job
        self.__schema = schema
        self.id = uuid.uuid4()
        self.__processTask = None
        self.__stdinF = None
        self.__stdoutF = None
        self.__stderrF = None
        self.exitCode = None
        self.__executor = executor
        self.errorMessage = None

        # temporary
        self.wdPath = '.'

        # inherit environment variables from parent process
        self.env = os.environ.copy()

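        # summarize the allocation: number of nodes, total number of cores,
        # a comma-separated list of node names and of per-node core counts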
        self.nnodes = len(self.allocation.nodeAllocations)
        self.ncores = sum(
            [node.cores for node in self.allocation.nodeAllocations])
        self.nlist = ','.join(
            [node.node.name for node in self.allocation.nodeAllocations])
        self.tasks_per_node = ','.join(
            [str(node.cores) for node in self.allocation.nodeAllocations])

        self.__setupJobVariables()

        # job execution description with variables replaced
        self.jobExecution = JobExecution(**json.loads(
            self.__substituteJobVariables(self.job.execution.toJSON())))
    def test_JobExecutionExportToJSONLength(self):
        je = JobExecution('/bin/hostname')
        j_json = je.toJSON()
        self.assertGreater(len(j_json), 0)
    def test_ExecutorSimple(self):
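        # run four local jobs (wc reading a prepared stdin file, env, sleep
        # and a bash script) and verify that each produced its working
        # directory, its stdout file and an empty stderr file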
        res = self.createLocalResources()
        manager = Manager(res)

        testSandbox = 'test-sandbox'
        if exists(testSandbox):
            shutil.rmtree(testSandbox)
        os.makedirs(testSandbox)

        for dirName in [
                'hostname.sandbox', 'env.sandbox', 'sleep.sandbox',
                'script.sandbox'
        ]:
            dPath = join(testSandbox, dirName)
            if exists(dPath):
                shutil.rmtree(dPath)

        scriptFile = abspath(join(testSandbox, 'script.sh'))
        if exists(scriptFile):
            os.remove(scriptFile)

        with open(scriptFile, 'w') as f:
            f.write('''#!/bin/bash

echo "*** environment ***"
env

echo "*** info ***"
echo "host: `hostname --fqdn`"
echo "cwd: `pwd`"
echo "date: `date`"
echo "account: `id`"
echo "taskset: `taskset -p $$`"
''')

        hostnameStdinFile = abspath(join(testSandbox, 'hostname.stdin'))
        if exists(hostnameStdinFile):
            os.remove(hostnameStdinFile)

        with open(hostnameStdinFile, 'w') as f:
            f.write('some host name')

        jobs = [
            Job(
                'job1',
                JobExecution(
                    '/usr/bin/wc',
                    args=['-m'],
                    wd=abspath(join(testSandbox, 'hostname.sandbox')),
                    stdin=hostnameStdinFile,
                    stdout='hostname.stdout',
                    stderr='hostname.stderr',
                ), JobResources(numCores=ResourceSize(2))),
            Job(
                'job2',
                JobExecution(
                    '/usr/bin/env',
                    wd=abspath(join(testSandbox, 'env.sandbox')),
                    stdout='env.stdout',
                    stderr='env.stderr',
                ), JobResources(numCores=ResourceSize(1))),
            Job(
                'sleep',
                JobExecution(
                    '/usr/bin/sleep',
                    args=['2s'],
                    wd=abspath(join(testSandbox, 'sleep.sandbox')),
                    stdout='sleep.stdout',
                    stderr='sleep.stderr',
                ), JobResources(numCores=ResourceSize(1))),
            Job(
                'script',
                JobExecution(
                    '/usr/bin/bash',
                    args=[scriptFile],
                    wd=abspath(join(testSandbox, 'script.sandbox')),
                    stdout='script.stdout',
                    stderr='script.stderr',
                ), JobResources(numCores=ResourceSize(1)))
        ]

        startTime = datetime.datetime.now()

        asyncio.get_event_loop().run_until_complete(
            asyncio.gather(self.__schedule(jobs, manager)))
        asyncio.get_event_loop().close()

        duration = datetime.datetime.now() - startTime
        self.assertTrue(2 < duration.total_seconds() < 6)

        for job in jobs:
            self.assertTrue(os.path.exists(job.execution.wd))
            self.assertTrue(
                os.path.exists(
                    os.path.join(job.execution.wd, job.execution.stdout)))
            self.assertTrue(
                os.path.exists(
                    os.path.join(job.execution.wd, job.execution.stderr)))

            stderrStat = os.stat(
                os.path.join(job.execution.wd, job.execution.stderr))
            self.assertEqual(stderrStat.st_size, 0)