class TestCommandTaskWithMockPopen(MockLoggerMixin, unittest.TestCase):
    """Exercise command tasks against a mocked subprocess.Popen."""

    def setUp(self):
        # Minimal project configuration that skips cumulusci.yml lookup.
        self.global_config = BaseGlobalConfig()
        self.project_config = BaseProjectConfig(
            self.global_config, config={"noyaml": True}
        )
        self.task_config = TaskConfig()

        # Capture task log output so the tests can assert on it.
        self._task_log_handler.reset()
        self.task_log = self._task_log_handler.messages

        # Swap the real subprocess.Popen for a scripted mock, restored on cleanup.
        self.Popen = MockPopen()
        self.r = Replacer()
        self.r.replace("cumulusci.tasks.command.subprocess.Popen", self.Popen)
        self.addCleanup(self.r.restore)

    def test_functional_mock_command(self):
        """Run a command task with mocked popen results and verify the log."""
        self.task_config.config["options"] = {"command": "ls -la"}
        self.Popen.set_command("ls -la", stdout=b"testing testing 123", stderr=b"e")

        task = Command(self.project_config, self.task_config)
        task()

        matches = [s for s in self.task_log["info"] if "testing testing" in s]
        self.assertTrue(matches)
Example #2
0
 def test_pass_executable(self):
     """The executable keyword is passed through and recorded."""
     mocked = MockPopen()
     mocked.set_command('a command', b'a', returncode=1)
     mocked('a command', executable='/foo/bar')
     expected = [call.Popen('a command', executable='/foo/bar')]
     compare(mocked.all_calls, expected=expected)
Example #3
0
class TestGitPopenMockupMixin:
    """Mixin that stubs out git's subprocess layer with a MockPopen."""

    def setup_git_popen(self):
        # repository mockup (in a temporary place)
        self.repository = Repo.init(self.tempdir.name)
        # setup git command mockup
        self.Popen = MockPopen()
        def FixPopen(*a, **k):
            # Strip start_new_session before delegating: MockPopen's Popen
            # signature does not accept it, but callers may pass it.
            if 'start_new_session' in k:
                del k['start_new_session']
            return self.Popen.Popen(*a, **k)
        self.Popen.mock.Popen.side_effect = FixPopen
        # The process object returned by the mock needs stdin plus
        # wait/context-manager support for git command wrappers to use it.
        self.Popen.mock.Popen_instance.stdin = None
        self.Popen.mock.Popen_instance.wait = lambda *a, **k: self.Popen.wait()
        self.Popen.mock.Popen_instance.__enter__ = lambda self: self
        self.Popen.mock.Popen_instance.__exit__ = lambda self, *a, **k: None

    def set_mock_popen_commands(self, cmd_list):
        # Register each (command, stdout, stderr, returncode) tuple on the mock.
        for cmd, out, err, rc in cmd_list:
            self.Popen.set_command(cmd, out, err, returncode=rc)

    def mockup_git(self, namespace, repository, url=None):
        # disable refspec check
        from git import remote
        remote.Remote._assert_refspec = lambda self: None
        # write FETCH_HEAD ref so the repository looks like it has fetched
        with open(os.path.join(self.repository.git_dir, 'FETCH_HEAD'), 'w') as f:
            url = url or "{}:{}/{}".format(self.service.fqdn, namespace, repository)
            f.write("749656b8b3b282d11a4221bb84e48291ca23ecc6" \
                    "		branch 'master' of {}".format(url))
        # Returns a context manager that patches git.cmd.Popen with the mock.
        return Replace('git.cmd.Popen', self.Popen)
Example #4
0
def test_job():
    """kill_job leaves the mocked process with its scripted return code."""
    mock_popen = MockPopen()
    mock_popen.set_command('top', stdout=b'o', stderr=b'e', returncode=1, pid=1000)
    proc = mock_popen('top', stdout=b'o', stderr=b'e', shell=True)
    proc.wait()
    execute.kill_job(proc)
    assert proc.returncode == 1
Example #5
0
 def test_invalid_method_or_attr(self):
     """Accessing an unknown attribute on the process raises AttributeError."""
     mocked = MockPopen()
     mocked.set_command('command')
     proc = mocked('command')
     with ShouldRaise(
             AttributeError("Mock object has no attribute 'foo'")):
         proc.foo()
Example #6
0
 def test_invalid_terminate(self):
     """terminate() rejects unexpected keyword arguments."""
     mocked = MockPopen()
     mocked.set_command('bar')
     proc = mocked('bar')
     message = "terminate() got an unexpected keyword argument 'foo'"
     with ShouldRaise(TypeError(message)):
         proc.terminate(foo='bar')
Example #7
0
 def test_read_from_stdout_with_stderr_redirected_check_stdout_stderr_interleaved(self):
     """With stderr redirected to stdout, stream lines come back interleaved."""
     mocked = MockPopen()
     mocked.set_command('a command', stdout=b'o1\no2\no3\no4\n', stderr=b'e1\ne2\n')
     proc = mocked('a command', stdout=PIPE, stderr=STDOUT, shell=True)
     # stdout should behave like a real file object
     self.assertTrue(isinstance(proc.stdout.fileno(), int))
     compare(b'o1\ne1\no2\ne2\no3\no4\n', proc.stdout.read())
Example #8
0
 def test_read_from_stdout_with_stderr_redirected_check_stdout_contents(self):
     """With stderr redirected, stdout carries both streams and stderr is None."""
     mocked = MockPopen()
     mocked.set_command('a command', stdout=b'foo', stderr=b'bar')
     proc = mocked('a command', stdout=PIPE, stderr=STDOUT, shell=True)
     compare(b'foobar', proc.stdout.read())
     compare(proc.stderr, None)
Example #9
0
 def test_start_new_session(self):
     """start_new_session is passed through and recorded in the call list."""
     mocked = MockPopen()
     mocked.set_command('a command')
     mocked('a command', start_new_session=True)
     expected = [call.Popen('a command', start_new_session=True)]
     compare(expected, mocked.mock.method_calls)
Example #10
0
 def test_read_from_stdout_and_stderr(self):
     """stdout and stderr can each be read when captured separately."""
     mocked = MockPopen()
     mocked.set_command("a command", stdout=b"foo", stderr=b"bar")
     proc = mocked("a command", stdout=PIPE, stderr=PIPE, shell=True)
     compare(proc.stdout.read(), b"foo")
     compare(proc.stderr.read(), b"bar")
     expected = [call.Popen("a command", shell=True, stderr=PIPE, stdout=PIPE)]
     compare(expected, mocked.mock.method_calls)
Example #11
0
 def test_invalid_poll(self):
     """poll() accepts no extra positional arguments."""
     mocked = MockPopen()
     mocked.set_command("bar")
     proc = mocked("bar")
     message = ("poll() takes exactly 1 argument (2 given)" if PY2
                else "poll() takes 1 positional argument but 2 were given")
     with ShouldRaise(TypeError(message)):
         proc.poll("moo")
Example #12
0
    def test_command_is_sequence(self):
        """A list-form command matches a command registered as a string."""
        mocked = MockPopen()
        mocked.set_command("a command")

        proc = mocked(["a", "command"], stdout=PIPE, stderr=PIPE)

        compare(proc.wait(), 0)
        expected = [
            call.Popen(["a", "command"], stderr=-1, stdout=-1),
            call.Popen_instance.wait(),
        ]
        compare(expected, mocked.mock.method_calls)
Example #13
0
 def test_read_from_stderr(self):
     """stderr contents are readable when captured with PIPE."""
     mocked = MockPopen()
     mocked.set_command("a command", stderr=b"foo")
     proc = mocked("a command", stdout=PIPE, stderr=PIPE, shell=True)
     # stdout still behaves like a real file object
     self.assertTrue(isinstance(proc.stdout.fileno(), int))
     compare(proc.stderr.read(), b"foo")
     expected = [call.Popen("a command", shell=True, stderr=-1, stdout=-1)]
     compare(expected, mocked.mock.method_calls)
Example #14
0
 def test_invalid_poll(self):
     """poll() rejects positional arguments."""
     mocked = MockPopen()
     mocked.set_command('bar')
     proc = mocked('bar')
     text = ('poll() takes exactly 1 argument (2 given)' if PY2
             else 'poll() takes 1 positional argument but 2 were given')
     with ShouldRaise(TypeError(text)):
         proc.poll('moo')
Example #15
0
 def test_sprocess_safe_wait_and_return_code(self):
     """wait() reports the configured return code and sets returncode."""
     command = "a command"
     mocked = MockPopen()
     mocked.set_command(command, returncode=3)
     proc = mocked(command)
     # returncode stays None until wait() completes
     compare(proc.returncode, None)
     compare(proc.wait(), 3)
     compare(proc.returncode, 3)
     expected = [call.Popen(command), call.Popen_instance.wait()]
     compare(expected, mocked.mock.method_calls)
Example #16
0
 def test_write_to_stdin(self):
     """Writes to stdin are recorded in the call list."""
     mocked = MockPopen()
     mocked.set_command('a command')
     proc = mocked('a command', stdin=PIPE, shell=True)
     proc.stdin.write('some text')
     expected = [
         call.Popen('a command', shell=True, stdin=PIPE),
         call.Popen_instance.stdin.write('some text'),
     ]
     compare(expected, mocked.mock.method_calls)
Example #17
0
 def test_read_from_stdout_and_stderr(self):
     """Both streams can be read when captured separately with PIPE."""
     mocked = MockPopen()
     mocked.set_command('a command', stdout=b'foo', stderr=b'bar')
     proc = mocked('a command', stdout=PIPE, stderr=PIPE, shell=True)
     compare(proc.stdout.read(), b'foo')
     compare(proc.stderr.read(), b'bar')
     expected = [
         call.Popen('a command', shell=True, stderr=PIPE, stdout=PIPE),
     ]
     compare(expected, mocked.mock.method_calls)
Example #18
0
 def test_kill(self):
     """kill() is recorded after the Popen call."""
     mocked = MockPopen()
     mocked.set_command('a command')
     proc = mocked('a command', stdout=PIPE, stderr=PIPE, shell=True)
     proc.kill()
     expected = [
         call.Popen('a command', shell=True, stderr=-1, stdout=-1),
         call.Popen_instance.kill(),
     ]
     compare(expected, mocked.mock.method_calls)
Example #19
0
 def test_communicate_with_input(self):
     """communicate() with input is recorded in the call list."""
     mocked = MockPopen()
     mocked.set_command('a command')
     proc = mocked('a command', stdout=PIPE, stderr=PIPE, shell=True)
     out, err = proc.communicate('foo')
     expected = [
         call.Popen('a command', shell=True, stderr=-1, stdout=-1),
         call.Popen_instance.communicate('foo'),
     ]
     compare(expected, mocked.mock.method_calls)
Example #20
0
 def test_wait_and_return_code(self):
     """wait() yields the configured return code and updates returncode."""
     mocked = MockPopen()
     mocked.set_command("a command", returncode=3)
     proc = mocked("a command")
     # returncode is unset before wait()
     compare(proc.returncode, None)
     compare(proc.wait(), 3)
     compare(proc.returncode, 3)
     expected = [call.Popen("a command"), call.Popen_instance.wait()]
     compare(expected, mocked.mock.method_calls)
Example #21
0
 def test_terminate(self):
     """terminate() is recorded after the Popen call."""
     mocked = MockPopen()
     mocked.set_command("a command")
     proc = mocked("a command", stdout=PIPE, stderr=PIPE, shell=True)
     proc.terminate()
     expected = [
         call.Popen("a command", shell=True, stderr=-1, stdout=-1),
         call.Popen_instance.terminate(),
     ]
     compare(expected, mocked.mock.method_calls)
Example #22
0
 def test_sprocess_safe_read_from_stdout_and_stderr(self):
     """stdout and stderr are readable from the simulated process."""
     command = "a command"
     mocked = MockPopen()
     # only static input is used with the simulated mockpopen; codacy
     # mistakenly sees these lines as real popen calls, hence the nosec markers
     mocked.set_command(command, stdout=b'foo', stderr=b'bar')
     proc = mocked(command, stdout=PIPE, stderr=PIPE, shell=True)  # nosec
     compare(proc.stdout.read(), b'foo')
     compare(proc.stderr.read(), b'bar')
     expected = [
         call.Popen(command, shell=True, stderr=PIPE, stdout=PIPE),  # nosec
     ]
     compare(expected, mocked.mock.method_calls)
Example #23
0
    def test_callable_default_behaviour(self):
        """A callable default behaviour receives command/stdin and shapes the result."""
        def behaviour(command, stdin):
            return PopenBehaviour(BytesLiteral(command), BytesLiteral(stdin), 1, 345, 0)

        mocked = MockPopen()
        mocked.set_default(behaviour=behaviour)

        proc = mocked('a command', stdin='some stdin', stdout=PIPE, stderr=PIPE)
        compare(proc.pid, 345)

        out, err = proc.communicate()

        # stdout/stderr echo back the command and stdin the callable received
        compare(out, b'a command')
        compare(err, b'some stdin')
        compare(proc.returncode, 1)
Example #24
0
 def test_sprocess_communicate_with_input(self):
     """communicate() with input is recorded in the call list.

     Returns the (stdout, stderr) pair produced by the simulated process.
     """
     command = "a command"
     Popen = MockPopen()
     Popen.set_command(command)
     #  only static input used with simulated mockpopen
     # codacy mistakenly sees this as a call to popen
     process = Popen(command, stdout=PIPE, stderr=PIPE, shell=True)  # nosec
     # Fix: communicate() returns (stdout, stderr); the original unpacked
     # these as (err, out), mislabelling both streams. The returned tuple
     # is unchanged — only the names are corrected.
     out, err = process.communicate('foo')
     compare([
             #  only static input used with simulated mockpopen
             # codacy mistakenly sees this as a call to popen
             call.Popen(command, shell=True, stderr=-1, stdout=-1),  # nosec
             call.Popen_instance.communicate('foo'),
             ], Popen.mock.method_calls)
     return out, err
Example #25
0
class PopenCommandsTest(unittest.TestCase):
    """Tests for Command._call_cmd against a mocked subprocess.Popen."""

    def setUp(self):
        # Replace the Popen used by the command module; restore on cleanup.
        self.Popen = MockPopen()
        self.r = Replacer()
        self.r.replace('swabbie.utils.command.subprocess.Popen', self.Popen)
        self.addCleanup(self.r.restore)

    def test_call_cmd(self):
        """_call_cmd returns the command output with a zero return code."""
        self.Popen.set_command('cmd1', stdout='foo')
        result = Command._call_cmd('cmd1')
        expected = CommandResult(output='foo', return_code=0)
        self.assertEqual(result.output, expected.output)
        self.assertEqual(result.return_code, expected.return_code)
        self.assertFalse(result.err)
Example #26
0
 def test_sprocess_safe_write_to_stdin(self):
     """stdin writes and the close are recorded in order."""
     command = "a command"
     mocked = MockPopen()
     mocked.set_command(command)
     # only static input is used with the simulated mockpopen; codacy
     # mistakenly sees these lines as real popen calls, hence the nosec markers
     proc = mocked(command, stdin=PIPE, shell=True)  # nosec
     proc.stdin.write(command)
     proc.stdin.close()
     expected = [
         call.Popen(command, shell=True, stdin=PIPE),  # nosec
         call.Popen_instance.stdin.write(command),
         call.Popen_instance.stdin.close(),
     ]
     compare(expected, mocked.mock.method_calls)
Example #27
0
 def test_all_signals(self):
     """send_signal, terminate and kill are all recorded in order."""
     mocked = MockPopen()
     mocked.set_command('a command')
     proc = mocked('a command')
     proc.send_signal(signal.SIGINT)
     proc.terminate()
     proc.kill()
     expected = [
         call.Popen('a command'),
         call.Popen_instance.send_signal(signal.SIGINT),
         call.Popen_instance.terminate(),
         call.Popen_instance.kill(),
     ]
     compare(expected, mocked.mock.method_calls)
Example #28
0
 def setup_method(self, method):
     """Per-test setup: build a temporary git repository, mock out git's
     Popen, wire up HTTP recording, and enable verbose git/HTTP logging."""
     self.log.info('GitRepoTestCase.setup_method({})'.format(method))
     # build temporary directory
     self.tempdir = TemporaryDirectory()
     # repository mockup (in a temporary place)
     self.repository = Repo.init(self.tempdir.name)
     # setup git command mockup
     self.Popen = MockPopen()
     # the mocked process object needs stdin plus wait/context-manager
     # support for git command wrappers to drive it
     self.Popen.mock.Popen_instance.stdin = None
     self.Popen.mock.Popen_instance.wait = lambda *a, **k: self.Popen.wait()
     self.Popen.mock.Popen_instance.__enter__ = lambda self: self
     self.Popen.mock.Popen_instance.__exit__ = lambda self, *a, **k: None
     # when initiating service with no repository, the connection is not triggered
     self.service = self.get_service()
     self.service.repository = self.repository
     # setup http api mockup (betamax records/replays HTTP interactions)
     self.recorder = betamax.Betamax(self.get_requests_session())
     # identity encoding keeps recorded bodies byte-comparable
     self.get_requests_session().headers['Accept-Encoding'] = 'identity'
     # have git commands logged
     Git.GIT_PYTHON_TRACE = True
     FORMAT = '> %(message)s'
     formatter = logging.Formatter(fmt=FORMAT)
     handler = logging.StreamHandler()
     handler.setFormatter(formatter)
     logging.getLogger('git.cmd').removeHandler(logging.NullHandler())
     logging.getLogger('git.cmd').addHandler(handler)
     logging.getLogger('git.cmd').propagate = True
     # have HTTP requests logged
     import http.client
     http.client.HTTPConnection.debuglevel = 1
     logging.getLogger('requests.packages.urllib3').setLevel(logging.DEBUG)
     logging.getLogger('requests.packages.urllib3').propagate = True
Example #29
0
 def test_multiple_uses(self):
     """Two registered commands can each be invoked and verified."""
     mocked = MockPopen()
     mocked.set_command('a command', b'a')
     mocked.set_command('b command', b'b')
     proc = mocked('a command', stdout=PIPE, stderr=PIPE, shell=True)
     out, err = proc.communicate('foo')
     compare(out, b'a')
     # a list-form command matches the registered string
     proc = mocked(['b', 'command'], stdout=PIPE, stderr=PIPE, shell=True)
     out, err = proc.communicate('foo')
     compare(out, b'b')
     expected = [
         call.Popen('a command', shell=True, stderr=-1, stdout=-1),
         call.Popen_instance.communicate('foo'),
         call.Popen(['b', 'command'], shell=True, stderr=-1, stdout=-1),
         call.Popen_instance.communicate('foo'),
     ]
     compare(expected, mocked.mock.method_calls)
class TestSubmit(TestCase):
    def setUp(self):
        """Install a MockPopen in place of subprocess.Popen, restored on cleanup."""
        self.popen = MockPopen()
        self.r = Replacer()
        self.r.replace('subprocess.Popen', self.popen)
        self.addCleanup(self.r.restore)

        # Wild hack: drop any trailing CLI parameters which can influence
        # bilby/condor job creation.
        sys.argv = sys.argv[:1]

    @patch('db.update_job', side_effect=update_job_mock)
    @patch("db.get_unique_job_id", side_effect=get_unique_job_id_mock_fn)
    @patch("core.misc.working_directory",
           side_effect=working_directory_mock_fn)
    @patch("scheduler.slurm.SlurmScheduler.submit", side_effect=submit_mock_fn)
    @patch.object(settings, "scheduler", EScheduler.SLURM)
    def test_submit_real_data_job_slurm(self, *args, **kwargs):
        """Submit a real-data job under SLURM and verify the job record, the
        data-generation popen calls and logs, the rewritten master slurm
        script, and the completed ini file."""
        # Generate a minimal ini file
        ini = args_to_bilby_ini({
            'label': 'test-real',
            'detectors': ['H1'],
            'trigger-time': '12345678',
            'injection-numbers': []
        }).decode('utf-8')

        details = {'job_id': 1}

        with TemporaryDirectory() as td:
            # module-level plumbing shared with the patched mock side effects
            global working_directory_mock_return, get_unique_job_id_mock_return, submit_mock_return, \
                update_job_result

            update_job_result = None

            working_directory_mock_return = os.path.join(td, 'job')

            # Configure the popen data generation mock
            popen_command = f'/bin/bash {td}/job/submit/test-real_data0_12345678-0_generation.sh'
            self.popen.set_command(popen_command,
                                   stdout=b'stdout test',
                                   stderr=b'stderr test')

            # Local imports so that the mocks work as expected
            from core.submit import submit

            submit_mock_return = 1234
            get_unique_job_id_mock_return = 4321

            params = dict(name='test-real',
                          description='Some description',
                          ini_string=ini)

            result = submit(details, json.dumps(params))

            # Check that the return value (The internal bundle submission id) is correct
            self.assertEqual(result, get_unique_job_id_mock_return)

            # Check that the internal job object was correctly created
            self.assertEqual(update_job_result['job_id'],
                             get_unique_job_id_mock_return)
            self.assertEqual(update_job_result['submit_id'],
                             submit_mock_return)
            self.assertEqual(update_job_result['working_directory'], td)
            self.assertEqual(update_job_result['submit_directory'],
                             'job/submit')

            # Check that the job script generation successfully called the the popen command
            process = call.Popen(popen_command,
                                 stderr=subprocess.PIPE,
                                 stdout=subprocess.PIPE,
                                 cwd=td,
                                 shell=True)
            compare(self.popen.all_calls,
                    expected=[
                        process,
                        process.wait(),
                        process.communicate(),
                        process.wait()
                    ])

            # Check the stdout and stderr logs for the data generation step are correctly written to their respective
            # log files
            with open(
                    os.path.join(td, 'job', 'log_data_generation',
                                 'test-real_data0_12345678-0_generation.out'),
                    'rb') as f:
                self.assertEqual(f.read(), b'stdout test')

            with open(
                    os.path.join(td, 'job', 'log_data_generation',
                                 'test-real_data0_12345678-0_generation.err'),
                    'rb') as f:
                self.assertEqual(f.read(), b'stderr test')

            # Check that the master slurm script was correctly modified
            with open(
                    os.path.join(td, 'job', 'submit',
                                 'slurm_test-real_master.sh'), 'r') as f:
                self.assertEqual(f.read(), """#!/bin/bash
#SBATCH --time=00:10:00
#SBATCH --output=job/submit/test-real_master_slurm.out
#SBATCH --error=job/submit/test-real_master_slurm.err

jid1=($(sbatch  --nodes=1 --ntasks-per-node=1 --mem=4G --time=7-00:00:00 --job-name=test-real_data0_12345678-0_analysis_H1_dynesty  --output=job/log_data_analysis/test-real_data0_12345678-0_analysis_H1_dynesty.out --error=job/log_data_analysis/test-real_data0_12345678-0_analysis_H1_dynesty.err job/submit/test-real_data0_12345678-0_analysis_H1_dynesty.sh))

echo "jid1 ${jid1[-1]}" >> job/submit/slurm_ids

jid2=($(sbatch  --nodes=1 --ntasks-per-node=1 --mem=4G --time=1:00:00 --job-name=test-real_data0_12345678-0_analysis_H1_dynesty_final_result --dependency=afterok:${jid1[-1]} --output=job/log_data_analysis/test-real_data0_12345678-0_analysis_H1_dynesty_final_result.out --error=job/log_data_analysis/test-real_data0_12345678-0_analysis_H1_dynesty_final_result.err job/submit/test-real_data0_12345678-0_analysis_H1_dynesty_final_result.sh))

echo "jid2 ${jid2[-1]}" >> job/submit/slurm_ids

jid3=($(sbatch  --nodes=1 --ntasks-per-node=1 --mem=32G --time=1:00:00 --job-name=test-real_data0_12345678-0_analysis_H1_dynesty_plot --dependency=afterok:${jid1[-1]} --output=job/log_data_analysis/test-real_data0_12345678-0_analysis_H1_dynesty_plot.out --error=job/log_data_analysis/test-real_data0_12345678-0_analysis_H1_dynesty_plot.err job/submit/test-real_data0_12345678-0_analysis_H1_dynesty_plot.sh))

echo "jid3 ${jid3[-1]}" >> job/submit/slurm_ids
"""

                                 # noqa
                                 )

                # NOTE(review): the checks below are nested inside the
                # previous `with` block — presumably unintentional
                # indentation, though harmless; confirm before reformatting.
                # Check that the ini file was correctly updated
                with open(
                        os.path.join(td, 'job',
                                     'test-real_config_complete.ini'),
                        'r') as f:
                    from core.submit import bilby_ini_to_args
                    args = bilby_ini_to_args(f.read())

                self.assertEqual(args.label, 'test-real')
                self.assertEqual(args.detectors, ["'H1'"])
                self.assertEqual(args.trigger_time, '12345678')
                self.assertEqual(args.outdir, os.path.join(td, 'job'))
                self.assertEqual(args.periodic_restart_time, 2147483647)
                self.assertEqual(args.scheduler, settings.scheduler.value)
                self.assertEqual(args.scheduler_env, settings.scheduler_env)
                self.assertEqual(args.transfer_files, False)

    @patch('db.update_job', side_effect=update_job_mock)
    @patch("db.get_unique_job_id", side_effect=get_unique_job_id_mock_fn)
    @patch("core.misc.working_directory",
           side_effect=working_directory_mock_fn)
    @patch("scheduler.slurm.SlurmScheduler.submit", side_effect=submit_mock_fn)
    @patch.object(settings, "scheduler", EScheduler.SLURM)
    def test_submit_simulated_data_job_slurm(self, *args, **kwargs):
        """Submit a simulated-data job under SLURM and verify the job record,
        that no data-generation popen call occurs (generation runs as its own
        slurm step instead), the master slurm script, and the ini file."""
        # Generate a minimal ini file
        ini = args_to_bilby_ini({
            'label': 'test-simulated',
            'detectors': ['H1', 'V1'],
            'trigger-time': '87654321',
            'n-simulation': 1,
            'gaussian_noise': True,
            'injection-numbers': []
        }).decode('utf-8')

        details = {'job_id': 1}

        with TemporaryDirectory() as td:
            # module-level plumbing shared with the patched mock side effects
            global working_directory_mock_return, get_unique_job_id_mock_return, submit_mock_return, \
                update_job_result

            update_job_result = None

            working_directory_mock_return = os.path.join(td, 'job')

            # Configure the popen data generation mock
            popen_command = f'/bin/bash {td}/job/submit/test-simulated_data0_12345678-0_generation.sh'
            self.popen.set_command(popen_command,
                                   stdout=b'stdout test',
                                   stderr=b'stderr test')

            # Local imports so that the mocks work as expected
            from core.submit import submit

            submit_mock_return = 12345
            get_unique_job_id_mock_return = 54321

            params = dict(name='test-simulated',
                          description='Some description',
                          ini_string=ini)

            result = submit(details, json.dumps(params))

            # Check that the return value (The internal bundle submission id) is correct
            self.assertEqual(result, get_unique_job_id_mock_return)

            # Check that the internal job object was correctly created
            self.assertEqual(update_job_result['job_id'],
                             get_unique_job_id_mock_return)
            self.assertEqual(update_job_result['submit_id'],
                             submit_mock_return)
            self.assertEqual(update_job_result['working_directory'], td)
            self.assertEqual(update_job_result['submit_directory'],
                             'job/submit')

            # Check that the job script generation did not call the the popen command
            compare(self.popen.all_calls, expected=[])

            # Check the stdout and stderr logs for the data generation step do not exist
            self.assertFalse(
                os.path.exists(
                    os.path.join(
                        td, 'log_data_generation',
                        'test-simulated_data0_12345678-0_generation.out')))
            self.assertFalse(
                os.path.exists(
                    os.path.join(
                        td, 'log_data_generation',
                        'test-simulated_data0_12345678-0_generation.err')))

            # Check that the master slurm script was correctly modified
            with open(
                    os.path.join(td, 'job', 'submit',
                                 'slurm_test-simulated_master.sh'), 'r') as f:
                self.assertEqual(f.read(), """#!/bin/bash
#SBATCH --time=00:10:00
#SBATCH --output=job/submit/test-simulated_master_slurm.out
#SBATCH --error=job/submit/test-simulated_master_slurm.err

jid0=($(sbatch  --nodes=1 --ntasks-per-node=1 --mem=8G --time=1:00:00 --job-name=test-simulated_data0_87654321-0_generation  --output=job/log_data_generation/test-simulated_data0_87654321-0_generation.out --error=job/log_data_generation/test-simulated_data0_87654321-0_generation.err job/submit/test-simulated_data0_87654321-0_generation.sh))

echo "jid0 ${jid0[-1]}" >> job/submit/slurm_ids

jid1=($(sbatch  --nodes=1 --ntasks-per-node=1 --mem=4G --time=7-00:00:00 --job-name=test-simulated_data0_87654321-0_analysis_H1V1_dynesty --dependency=afterok:${jid0[-1]} --output=job/log_data_analysis/test-simulated_data0_87654321-0_analysis_H1V1_dynesty.out --error=job/log_data_analysis/test-simulated_data0_87654321-0_analysis_H1V1_dynesty.err job/submit/test-simulated_data0_87654321-0_analysis_H1V1_dynesty.sh))

echo "jid1 ${jid1[-1]}" >> job/submit/slurm_ids

jid2=($(sbatch  --nodes=1 --ntasks-per-node=1 --mem=4G --time=1:00:00 --job-name=test-simulated_data0_87654321-0_analysis_H1V1_dynesty_final_result --dependency=afterok:${jid1[-1]} --output=job/log_data_analysis/test-simulated_data0_87654321-0_analysis_H1V1_dynesty_final_result.out --error=job/log_data_analysis/test-simulated_data0_87654321-0_analysis_H1V1_dynesty_final_result.err job/submit/test-simulated_data0_87654321-0_analysis_H1V1_dynesty_final_result.sh))

echo "jid2 ${jid2[-1]}" >> job/submit/slurm_ids

jid3=($(sbatch  --nodes=1 --ntasks-per-node=1 --mem=32G --time=1:00:00 --job-name=test-simulated_data0_87654321-0_analysis_H1V1_dynesty_plot --dependency=afterok:${jid1[-1]} --output=job/log_data_analysis/test-simulated_data0_87654321-0_analysis_H1V1_dynesty_plot.out --error=job/log_data_analysis/test-simulated_data0_87654321-0_analysis_H1V1_dynesty_plot.err job/submit/test-simulated_data0_87654321-0_analysis_H1V1_dynesty_plot.sh))

echo "jid3 ${jid3[-1]}" >> job/submit/slurm_ids
"""

                                 # noqa
                                 )

                # NOTE(review): the checks below are nested inside the
                # previous `with` block — presumably unintentional
                # indentation, though harmless; confirm before reformatting.
                # Check that the ini file was correctly updated
                with open(
                        os.path.join(td, 'job',
                                     'test-simulated_config_complete.ini'),
                        'r') as f:
                    from core.submit import bilby_ini_to_args
                    args = bilby_ini_to_args(f.read())

                self.assertEqual(args.label, 'test-simulated')
                self.assertEqual(args.detectors, ["'H1'", "'V1'"])
                self.assertEqual(args.trigger_time, '87654321')
                self.assertEqual(args.n_simulation, 1)
                self.assertEqual(args.gaussian_noise, True)
                self.assertEqual(args.outdir, os.path.join(td, 'job'))
                self.assertEqual(args.periodic_restart_time, 2147483647)
                self.assertEqual(args.scheduler, settings.scheduler.value)
                self.assertEqual(args.scheduler_env, settings.scheduler_env)
                self.assertEqual(args.transfer_files, False)

    @patch('db.update_job', side_effect=update_job_mock)
    @patch("db.get_unique_job_id", side_effect=get_unique_job_id_mock_fn)
    @patch("core.misc.working_directory",
           side_effect=working_directory_mock_fn)
    @patch("scheduler.slurm.SlurmScheduler.submit", side_effect=submit_mock_fn)
    @patch.object(settings, "scheduler", EScheduler.SLURM)
    def test_submit_simulated_data_job_submission_error_slurm(
            self, *args, **kwargs):
        # Generate a minimal ini file
        ini = args_to_bilby_ini({
            'label': 'test-simulated-submission-failure',
            'detectors': ['V1', 'L1'],
            'trigger-time': '11111111',
            'n-simulation': 1,
            'gaussian_noise': True,
            'injection-numbers': []
        }).decode('utf-8')

        details = {'job_id': 1}

        with TemporaryDirectory() as td:
            global working_directory_mock_return, submit_mock_return, \
                update_job_result

            update_job_result = None

            working_directory_mock_return = os.path.join(td, 'job')

            # Configure the popen data generation mock
            popen_command = f'/bin/bash {td}/submit/test-simulated_data0_12345678-0_generation.sh'
            self.popen.set_command(popen_command,
                                   stdout=b'stdout test',
                                   stderr=b'stderr test')

            # Local imports so that the mocks work as expected
            from core.submit import submit

            submit_mock_return = None

            params = dict(name='test-simulated-submission-failure',
                          description='Some description',
                          ini_string=ini)

            result = submit(details, json.dumps(params))

            # Check that the return value (The internal bundle submission id) is correct
            self.assertEqual(result, None)

            # Check that the internal job object not created
            self.assertEqual(update_job_result, None)

            # Check that the job script generation did not call the the popen command
            compare(self.popen.all_calls, expected=[])

            # Check the stdout and stderr logs for the data generation step do not exist
            self.assertFalse(
                os.path.exists(
                    os.path.join(
                        td, 'log_data_generation',
                        'test-simulated_data0_12345678-0_generation.out')))
            self.assertFalse(
                os.path.exists(
                    os.path.join(
                        td, 'log_data_generation',
                        'test-simulated_data0_12345678-0_generation.err')))

            # Check that the master slurm script was correctly modified
            with open(
                    os.path.join(
                        td, 'job', 'submit',
                        'slurm_test-simulated-submission-failure_master.sh'),
                    'r') as f:
                self.assertEqual(f.read(), """#!/bin/bash
#SBATCH --time=00:10:00
#SBATCH --output=job/submit/test-simulated-submission-failure_master_slurm.out
#SBATCH --error=job/submit/test-simulated-submission-failure_master_slurm.err

jid0=($(sbatch  --nodes=1 --ntasks-per-node=1 --mem=8G --time=1:00:00 --job-name=test-simulated-submission-failure_data0_11111111-0_generation  --output=job/log_data_generation/test-simulated-submission-failure_data0_11111111-0_generation.out --error=job/log_data_generation/test-simulated-submission-failure_data0_11111111-0_generation.err job/submit/test-simulated-submission-failure_data0_11111111-0_generation.sh))

echo "jid0 ${jid0[-1]}" >> job/submit/slurm_ids

jid1=($(sbatch  --nodes=1 --ntasks-per-node=1 --mem=4G --time=7-00:00:00 --job-name=test-simulated-submission-failure_data0_11111111-0_analysis_L1V1_dynesty --dependency=afterok:${jid0[-1]} --output=job/log_data_analysis/test-simulated-submission-failure_data0_11111111-0_analysis_L1V1_dynesty.out --error=job/log_data_analysis/test-simulated-submission-failure_data0_11111111-0_analysis_L1V1_dynesty.err job/submit/test-simulated-submission-failure_data0_11111111-0_analysis_L1V1_dynesty.sh))

echo "jid1 ${jid1[-1]}" >> job/submit/slurm_ids

jid2=($(sbatch  --nodes=1 --ntasks-per-node=1 --mem=4G --time=1:00:00 --job-name=test-simulated-submission-failure_data0_11111111-0_analysis_L1V1_dynesty_final_result --dependency=afterok:${jid1[-1]} --output=job/log_data_analysis/test-simulated-submission-failure_data0_11111111-0_analysis_L1V1_dynesty_final_result.out --error=job/log_data_analysis/test-simulated-submission-failure_data0_11111111-0_analysis_L1V1_dynesty_final_result.err job/submit/test-simulated-submission-failure_data0_11111111-0_analysis_L1V1_dynesty_final_result.sh))

echo "jid2 ${jid2[-1]}" >> job/submit/slurm_ids

jid3=($(sbatch  --nodes=1 --ntasks-per-node=1 --mem=32G --time=1:00:00 --job-name=test-simulated-submission-failure_data0_11111111-0_analysis_L1V1_dynesty_plot --dependency=afterok:${jid1[-1]} --output=job/log_data_analysis/test-simulated-submission-failure_data0_11111111-0_analysis_L1V1_dynesty_plot.out --error=job/log_data_analysis/test-simulated-submission-failure_data0_11111111-0_analysis_L1V1_dynesty_plot.err job/submit/test-simulated-submission-failure_data0_11111111-0_analysis_L1V1_dynesty_plot.sh))

echo "jid3 ${jid3[-1]}" >> job/submit/slurm_ids
"""

                                 # noqa
                                 )

                # Check that the ini file was correctly updated
                with open(
                        os.path.join(
                            td, 'job',
                            'test-simulated-submission-failure_config_complete.ini'
                        ), 'r') as f:
                    from core.submit import bilby_ini_to_args
                    args = bilby_ini_to_args(f.read())

                self.assertEqual(args.label,
                                 'test-simulated-submission-failure')
                self.assertEqual(args.detectors, ["'V1'", "'L1'"])
                self.assertEqual(args.trigger_time, '11111111')
                self.assertEqual(args.n_simulation, 1)
                self.assertEqual(args.gaussian_noise, True)
                self.assertEqual(args.outdir, os.path.join(td, 'job'))
                self.assertEqual(args.periodic_restart_time, 2147483647)
                self.assertEqual(args.scheduler, settings.scheduler.value)
                self.assertEqual(args.scheduler_env, settings.scheduler_env)
                self.assertEqual(args.transfer_files, False)

    @patch('db.update_job', side_effect=update_job_mock)
    @patch("db.get_unique_job_id", side_effect=get_unique_job_id_mock_fn)
    @patch("core.misc.working_directory",
           side_effect=working_directory_mock_fn)
    @patch("scheduler.condor.CondorScheduler.submit",
           side_effect=submit_mock_fn)
    @patch.object(settings, "scheduler", EScheduler.CONDOR)
    def test_submit_real_data_job_condor(self, *args, **kwargs):
        """Submit a real-data job (single H1 detector, no simulation) with
        the condor scheduler patched in, then verify:

        * the returned internal bundle submission id,
        * the job details recorded via the mocked ``db.update_job``,
        * the generated condor dag submit file, and
        * the completed ini file written by the submission.
        """
        # Generate a minimal ini file
        ini = args_to_bilby_ini({
            'label': 'test-real',
            'detectors': ['H1'],
            'trigger-time': '12345678',
            'injection-numbers': []
        }).decode('utf-8')

        details = {'job_id': 1}

        with TemporaryDirectory() as td:
            # Module-level globals consumed by the mocked side-effect
            # functions patched above (inputs) and written back by them
            # (captured outputs such as update_job_result).
            global working_directory_mock_return, get_unique_job_id_mock_return, submit_mock_return, \
                update_job_result

            update_job_result = None

            working_directory_mock_return = os.path.join(td, 'job')

            # Local imports so that the mocks work as expected
            from core.submit import submit

            submit_mock_return = 1234
            get_unique_job_id_mock_return = 4321

            params = dict(name='test-real',
                          description='Some description',
                          ini_string=ini)

            result = submit(details, json.dumps(params))

            # Check that the return value (The internal bundle submission id) is correct
            self.assertEqual(result, get_unique_job_id_mock_return)

            # Check that the internal job object was correctly created
            self.assertEqual(update_job_result['job_id'],
                             get_unique_job_id_mock_return)
            self.assertEqual(update_job_result['submit_id'],
                             submit_mock_return)
            self.assertEqual(update_job_result['working_directory'], td)
            self.assertEqual(update_job_result['submit_directory'],
                             'job/submit')

            # Check that the condor dag submit file was correctly generated
            # (condor equivalent of the slurm "master" script in other tests)
            with open(
                    os.path.join(td, 'job', 'submit', 'dag_test-real.submit'),
                    'r') as f:
                self.assertEqual(f.read(
                ), """JOB test-real_data0_12345678-0_generation_arg_0 job/submit/test-real_data0_12345678-0_generation.submit
VARS test-real_data0_12345678-0_generation_arg_0 ARGS="job/test-real_config_complete.ini --label test-real_data0_12345678-0_generation --idx 0 --trigger-time 12345678.0"
JOB test-real_data0_12345678-0_analysis_H1_dynesty_arg_0 job/submit/test-real_data0_12345678-0_analysis_H1_dynesty.submit
VARS test-real_data0_12345678-0_analysis_H1_dynesty_arg_0 ARGS="job/test-real_config_complete.ini --detectors H1 --label test-real_data0_12345678-0_analysis_H1_dynesty --data-dump-file job/data/test-real_data0_12345678-0_generation_data_dump.pickle --sampler dynesty"
JOB test-real_data0_12345678-0_analysis_H1_dynesty_final_result_arg_0 job/submit/test-real_data0_12345678-0_analysis_H1_dynesty_final_result.submit
VARS test-real_data0_12345678-0_analysis_H1_dynesty_final_result_arg_0 ARGS="--result job/result/test-real_data0_12345678-0_analysis_H1_dynesty_result.json --outdir job/final_result --extension json --max-samples 20000 --lightweight --save"
JOB test-real_data0_12345678-0_analysis_H1_dynesty_plot_arg_0 job/submit/test-real_data0_12345678-0_analysis_H1_dynesty_plot.submit
VARS test-real_data0_12345678-0_analysis_H1_dynesty_plot_arg_0 ARGS="--result job/result/test-real_data0_12345678-0_analysis_H1_dynesty_result.json --outdir job/result --corner --marginal --skymap --waveform --format png"

#Inter-job dependencies
Parent test-real_data0_12345678-0_generation_arg_0 Child test-real_data0_12345678-0_analysis_H1_dynesty_arg_0
Parent test-real_data0_12345678-0_analysis_H1_dynesty_arg_0 Child test-real_data0_12345678-0_analysis_H1_dynesty_final_result_arg_0
Parent test-real_data0_12345678-0_analysis_H1_dynesty_arg_0 Child test-real_data0_12345678-0_analysis_H1_dynesty_plot_arg_0"""

                                 # noqa
                                 )

                # Check that the ini file was correctly updated
                with open(
                        os.path.join(td, 'job',
                                     'test-real_config_complete.ini'),
                        'r') as f:
                    from core.submit import bilby_ini_to_args
                    args = bilby_ini_to_args(f.read())

                self.assertEqual(args.label, 'test-real')
                self.assertEqual(args.detectors, ["'H1'"])
                self.assertEqual(args.trigger_time, '12345678')
                self.assertEqual(args.outdir, os.path.join(td, 'job'))
                self.assertEqual(args.periodic_restart_time, 28800)
                self.assertEqual(args.scheduler, settings.scheduler.value)
                self.assertEqual(args.scheduler_env, settings.scheduler_env)
                self.assertEqual(args.accounting, 'no.group')
                self.assertEqual(args.transfer_files, False)

    @patch('db.update_job', side_effect=update_job_mock)
    @patch("db.get_unique_job_id", side_effect=get_unique_job_id_mock_fn)
    @patch("core.misc.working_directory",
           side_effect=working_directory_mock_fn)
    @patch("scheduler.condor.CondorScheduler.submit",
           side_effect=submit_mock_fn)
    @patch.object(settings, "scheduler", EScheduler.CONDOR)
    def test_submit_simulated_data_job_condor(self, *args, **kwargs):
        """Submit a simulated-data job (H1+V1, gaussian noise) with the
        condor scheduler patched in, then verify the returned bundle id, the
        recorded job details, the generated condor dag submit file, and the
        completed ini file.
        """
        # Generate a minimal ini file
        ini = args_to_bilby_ini({
            'label': 'test-simulated',
            'detectors': ['H1', 'V1'],
            'trigger-time': '87654321',
            'n-simulation': 1,
            'gaussian_noise': True,
            'injection-numbers': []
        }).decode('utf-8')

        details = {'job_id': 1}

        with TemporaryDirectory() as td:
            # Module-level globals consumed/written by the mocked
            # side-effect functions patched above.
            global working_directory_mock_return, get_unique_job_id_mock_return, submit_mock_return, \
                update_job_result

            update_job_result = None

            working_directory_mock_return = os.path.join(td, 'job')

            # Local imports so that the mocks work as expected
            from core.submit import submit

            submit_mock_return = 1234
            get_unique_job_id_mock_return = 4321

            # NOTE(review): name is 'test-real' although this test exercises
            # the 'test-simulated' ini label — looks like a copy-paste from
            # test_submit_real_data_job_condor. All checked artefacts are
            # keyed off the ini label, so presumably name is not significant
            # here; confirm before relying on it.
            params = dict(name='test-real',
                          description='Some description',
                          ini_string=ini)

            result = submit(details, json.dumps(params))

            # Check that the return value (The internal bundle submission id) is correct
            self.assertEqual(result, get_unique_job_id_mock_return)

            # Check that the internal job object was correctly created
            self.assertEqual(update_job_result['job_id'],
                             get_unique_job_id_mock_return)
            self.assertEqual(update_job_result['submit_id'],
                             submit_mock_return)
            self.assertEqual(update_job_result['working_directory'], td)
            self.assertEqual(update_job_result['submit_directory'],
                             'job/submit')

            # Check that the condor dag submit file was correctly generated
            with open(
                    os.path.join(td, 'job', 'submit',
                                 'dag_test-simulated.submit'), 'r') as f:
                self.assertEqual(f.read(
                ), """JOB test-simulated_data0_87654321-0_generation_arg_0 job/submit/test-simulated_data0_87654321-0_generation.submit
VARS test-simulated_data0_87654321-0_generation_arg_0 ARGS="job/test-simulated_config_complete.ini --label test-simulated_data0_87654321-0_generation --idx 0 --trigger-time 87654321.0"
JOB test-simulated_data0_87654321-0_analysis_H1V1_dynesty_arg_0 job/submit/test-simulated_data0_87654321-0_analysis_H1V1_dynesty.submit
VARS test-simulated_data0_87654321-0_analysis_H1V1_dynesty_arg_0 ARGS="job/test-simulated_config_complete.ini --detectors H1 --detectors V1 --label test-simulated_data0_87654321-0_analysis_H1V1_dynesty --data-dump-file job/data/test-simulated_data0_87654321-0_generation_data_dump.pickle --sampler dynesty"
JOB test-simulated_data0_87654321-0_analysis_H1V1_dynesty_final_result_arg_0 job/submit/test-simulated_data0_87654321-0_analysis_H1V1_dynesty_final_result.submit
VARS test-simulated_data0_87654321-0_analysis_H1V1_dynesty_final_result_arg_0 ARGS="--result job/result/test-simulated_data0_87654321-0_analysis_H1V1_dynesty_result.json --outdir job/final_result --extension json --max-samples 20000 --lightweight --save"
JOB test-simulated_data0_87654321-0_analysis_H1V1_dynesty_plot_arg_0 job/submit/test-simulated_data0_87654321-0_analysis_H1V1_dynesty_plot.submit
VARS test-simulated_data0_87654321-0_analysis_H1V1_dynesty_plot_arg_0 ARGS="--result job/result/test-simulated_data0_87654321-0_analysis_H1V1_dynesty_result.json --outdir job/result --corner --marginal --skymap --waveform --format png"

#Inter-job dependencies
Parent test-simulated_data0_87654321-0_generation_arg_0 Child test-simulated_data0_87654321-0_analysis_H1V1_dynesty_arg_0
Parent test-simulated_data0_87654321-0_analysis_H1V1_dynesty_arg_0 Child test-simulated_data0_87654321-0_analysis_H1V1_dynesty_final_result_arg_0
Parent test-simulated_data0_87654321-0_analysis_H1V1_dynesty_arg_0 Child test-simulated_data0_87654321-0_analysis_H1V1_dynesty_plot_arg_0"""

                                 # noqa
                                 )

                # Check that the ini file was correctly updated
                with open(
                        os.path.join(td, 'job',
                                     'test-simulated_config_complete.ini'),
                        'r') as f:
                    from core.submit import bilby_ini_to_args
                    args = bilby_ini_to_args(f.read())

                self.assertEqual(args.label, 'test-simulated')
                self.assertEqual(args.detectors, ["'H1'", "'V1'"])
                self.assertEqual(args.trigger_time, '87654321')
                self.assertEqual(args.n_simulation, 1)
                self.assertEqual(args.gaussian_noise, True)
                self.assertEqual(args.outdir, os.path.join(td, 'job'))
                self.assertEqual(args.periodic_restart_time, 28800)
                self.assertEqual(args.scheduler, settings.scheduler.value)
                self.assertEqual(args.scheduler_env, settings.scheduler_env)
                self.assertEqual(args.accounting, 'no.group')
                self.assertEqual(args.transfer_files, False)
# --- Example #31 (snippet separator from the scrape; commented so the file parses) ---
class TestSlurmDockerRunner(unittest.TestCase):
    """Tests for the docker engine when popper runs under the slurm
    resource manager: docker commands get wrapped in sbatch scripts."""

    def setUp(self):
        # Silence popper's logger and intercept the Popen used by the host
        # runner so no real processes are spawned.
        log.setLevel("CRITICAL")
        self.Popen = MockPopen()
        replacer = Replacer()
        replacer.replace("popper.runner_host.Popen", self.Popen)
        self.addCleanup(replacer.restore)

    def tearDown(self):
        # Restore default logging behaviour.
        log.setLevel("NOTSET")

    def test_create_cmd(self):
        """_create_cmd builds the full `docker create` invocation, with and
        without extra engine options supplied through the config file."""
        config = {"workspace_dir": "/w"}
        with DockerRunner(config=ConfigLoader.load(**config)) as drunner:
            step = Box({"args": ["-two", "-flags"]}, default_box=True)
            cmd = drunner._create_cmd(step, "foo:1.9", "container_name")

            expected = ("docker create"
                        " --name container_name"
                        " --workdir /workspace"
                        " -v /w:/workspace"
                        " -v /var/run/docker.sock:/var/run/docker.sock"
                        " foo:1.9 -two -flags")

            self.assertEqual(expected, cmd)

        # Second pass: engine options (privileged, hostname, extra volumes,
        # environment) must be reflected in the generated command.
        config_dict = {
            "engine": {
                "name": "docker",
                "options": {
                    "privileged": True,
                    "hostname": "popper.local",
                    "domainname": "www.example.org",
                    "volumes": ["/path/in/host:/path/in/container"],
                    "environment": {
                        "FOO": "bar"
                    },
                },
            },
            "resource_manager": {
                "name": "slurm"
            },
        }

        config = {"workspace_dir": "/w", "config_file": config_dict}
        with DockerRunner(config=ConfigLoader.load(**config)) as drunner:
            step = Box({"args": ["-two", "-flags"]}, default_box=True)
            cmd = drunner._create_cmd(step, "foo:1.9", "container_name")

            expected = ("docker create --name container_name "
                        "--workdir /workspace "
                        "-v /w:/workspace "
                        "-v /var/run/docker.sock:/var/run/docker.sock "
                        "-v /path/in/host:/path/in/container "
                        "-e FOO=bar --privileged --hostname popper.local "
                        "--domainname www.example.org "
                        "foo:1.9 -two -flags")

            self.assertEqual(expected, cmd)

    @replace("popper.runner_slurm.os.kill", mock_kill)
    def test_run(self, mock_kill):
        """Run a one-step workflow end-to-end and verify the generated
        sbatch script contains the expected docker command sequence."""
        config_dict = {
            "engine": {
                "name": "docker",
                "options": {
                    "privileged": True,
                    "hostname": "popper.local",
                    "domainname": "www.example.org",
                    "volumes": ["/path/in/host:/path/in/container"],
                    "environment": {
                        "FOO": "bar"
                    },
                },
            },
            "resource_manager": {
                "name": "slurm"
            },
        }

        config = ConfigLoader.load(workspace_dir="/w", config_file=config_dict)

        # Register the exact command strings the runner is expected to spawn;
        # MockPopen raises for any command not registered here.
        self.Popen.set_command(
            f"sbatch --wait --job-name popper_1_{config.wid} "
            f"--output /tmp/popper/slurm/popper_1_{config.wid}.out "
            f"/tmp/popper/slurm/popper_1_{config.wid}.sh",
            returncode=0,
        )

        self.Popen.set_command(
            f"tail -f /tmp/popper/slurm/popper_1_{config.wid}.out",
            returncode=0)

        with WorkflowRunner(config) as r:
            wf_data = {
                "steps": [{
                    "uses": "popperized/bin/sh@master",
                    "runs": ["cat"],
                    "args": ["README.md"],
                }]
            }
            r.run(WorkflowParser.parse(wf_data=wf_data))

        # The generated sbatch script must contain the docker rm/build/
        # create/start sequence for the step container.
        with open(f"/tmp/popper/slurm/popper_1_{config.wid}.sh", "r") as f:
            # fmt: off
            expected = f"""#!/bin/bash
docker rm -f popper_1_{config.wid} || true
docker build -t popperized/bin:master {os.environ['HOME']}/.cache/popper/{config.wid}/github.com/popperized/bin/sh
docker create --name popper_1_{config.wid} --workdir /workspace --entrypoint cat -v /w:/workspace -v /var/run/docker.sock:/var/run/docker.sock -v /path/in/host:/path/in/container -e FOO=bar --privileged --hostname popper.local --domainname www.example.org popperized/bin:master README.md
docker start --attach popper_1_{config.wid}"""
            # fmt: on
            actual = f.read()
            self.maxDiff = None
            self.assertEqual(expected, actual)
# --- Example #32 (snippet separator from the scrape; commented so the file parses) ---
class TestSlurmSlurmRunner(PopperTest):
    """Tests for the host (shell) runner when popper is configured with the
    slurm resource manager: steps are wrapped in sbatch scripts and their
    output is followed with `tail -f`."""

    def setUp(self):
        # Silence popper's logger and intercept the Popen used by the host
        # runner so no real processes are spawned.
        log.setLevel("CRITICAL")
        self.Popen = MockPopen()
        replacer = Replacer()
        replacer.replace("popper.runner_host.Popen", self.Popen)
        self.addCleanup(replacer.restore)

    def tearDown(self):
        # Restore default logging behaviour.
        log.setLevel("NOTSET")

    def test_tail_output(self):
        """_tail_output spawns `tail -f` on the file and records the pid."""
        self.Popen.set_command("tail -f slurm-x.out", returncode=0)
        with SlurmRunner(config=ConfigLoader.load()) as sr:
            self.assertEqual(sr._tail_output("slurm-x.out"), 0)
            self.assertEqual(len(sr._out_stream_pid), 1)

    def test_stop_running_tasks(self):
        """stop_running_tasks cancels each spawned job via `scancel`."""
        self.Popen.set_command("scancel --name job_a", returncode=0)
        with SlurmRunner(config=ConfigLoader.load()) as sr:
            sr._spawned_jobs.add("job_a")
            sr.stop_running_tasks()
            # stderr=-2 is subprocess.STDOUT, stdout=-1 is subprocess.PIPE
            compare(
                call.Popen(
                    ["scancel", "--name", "job_a"],
                    cwd=os.getcwd(),
                    env=None,
                    preexec_fn=os.setsid,
                    stderr=-2,
                    stdout=-1,
                    universal_newlines=True,
                ),
                self.Popen.all_calls[0],
            )

    @replace("popper.runner_slurm.os.kill", mock_kill)
    def test_submit_batch_job(self, mock_kill):
        """_submit_batch_job writes the sbatch script, runs `sbatch --wait`,
        tails the output file, and clears its bookkeeping on success."""
        config = ConfigLoader.load(workspace_dir="/w")
        self.Popen.set_command(
            "sbatch --wait "
            f"--job-name popper_sample_{config.wid} "
            f"--output /tmp/popper/slurm/popper_sample_{config.wid}.out "
            f"/tmp/popper/slurm/popper_sample_{config.wid}.sh",
            returncode=0,
        )
        self.Popen.set_command(
            f"tail -f /tmp/popper/slurm/popper_sample_{config.wid}.out",
            returncode=0)
        step = Box({"id": "sample"}, default_box=True)
        with SlurmRunner(config=config) as sr:
            sr._submit_batch_job(["ls -la"], step)
            with open(f"/tmp/popper/slurm/popper_sample_{config.wid}.sh",
                      "r") as f:
                content = f.read()

            self.assertEqual(content, "#!/bin/bash\nls -la")
            self.assertEqual(len(sr._spawned_jobs), 0)
            self.assertEqual(sr._out_stream_thread.is_alive(), False)

        # Expected Popen invocations as recorded by MockPopen.
        call_tail = call.Popen(
            [
                "tail", "-f",
                f"/tmp/popper/slurm/popper_sample_{config.wid}.out"
            ],
            cwd=os.getcwd(),
            env=None,
            preexec_fn=os.setsid,
            stderr=-2,
            stdout=-1,
            universal_newlines=True,
        )

        call_sbatch = call.Popen(
            [
                "sbatch",
                "--wait",
                "--job-name",
                f"popper_sample_{config.wid}",
                "--output",
                f"/tmp/popper/slurm/popper_sample_{config.wid}.out",
                f"/tmp/popper/slurm/popper_sample_{config.wid}.sh",
            ],
            cwd=os.getcwd(),
            env=None,
            preexec_fn=os.setsid,
            stderr=-2,
            stdout=-1,
            universal_newlines=True,
        )

        self.assertEqual(call_tail in self.Popen.all_calls, True)
        self.assertEqual(call_sbatch in self.Popen.all_calls, True)

    @replace("popper.runner_slurm.os.kill", mock_kill)
    def test_submit_job_failure(self, mock_kill):
        """A non-zero sbatch return code (12 here) must make the workflow
        run exit via SystemExit, after both sbatch and tail were invoked."""
        config_dict = {
            "engine": {
                "name": "docker",
                "options": {}
            },
            "resource_manager": {
                "name": "slurm",
                "options": {}
            },
        }

        config = ConfigLoader.load(workspace_dir="/w", config_file=config_dict)

        self.Popen.set_command(
            f"sbatch --wait --job-name popper_1_{config.wid} "
            f"--output /tmp/popper/slurm/popper_1_{config.wid}.out "
            f"/tmp/popper/slurm/popper_1_{config.wid}.sh",
            returncode=12,
        )

        self.Popen.set_command(
            f"tail -f /tmp/popper/slurm/popper_1_{config.wid}.out",
            returncode=0)

        with WorkflowRunner(config) as r:
            wf_data = {
                "steps": [{
                    "uses": "popperized/bin/sh@master",
                    "runs": ["cat"],
                    "args": ["README.md"],
                }]
            }
            self.assertRaises(SystemExit, r.run,
                              WorkflowParser.parse(wf_data=wf_data))

            call_tail = call.Popen(
                ["tail", "-f", f"/tmp/popper/slurm/popper_1_{config.wid}.out"],
                cwd=os.getcwd(),
                env=None,
                preexec_fn=os.setsid,
                stderr=-2,
                stdout=-1,
                universal_newlines=True,
            )

            call_sbatch = call.Popen(
                [
                    "sbatch",
                    "--wait",
                    "--job-name",
                    f"popper_1_{config.wid}",
                    "--output",
                    f"/tmp/popper/slurm/popper_1_{config.wid}.out",
                    f"/tmp/popper/slurm/popper_1_{config.wid}.sh",
                ],
                cwd=os.getcwd(),
                env=None,
                preexec_fn=os.setsid,
                stderr=-2,
                stdout=-1,
                universal_newlines=True,
            )

            self.assertEqual(call_tail in self.Popen.all_calls, True)
            self.assertEqual(call_sbatch in self.Popen.all_calls, True)

    def test_dry_run(self):
        """With dry_run=True no process may ever be spawned."""
        config = ConfigLoader.load(
            engine_name="docker",
            resman_name="slurm",
            dry_run=True,
        )

        with WorkflowRunner(config) as r:
            wf_data = {
                "steps": [{
                    "uses": "popperized/bin/sh@master",
                    "runs": ["cat"],
                    "args": ["README.md"],
                }]
            }
            r.run(WorkflowParser.parse(wf_data=wf_data))

        self.assertEqual(self.Popen.all_calls, [])
# --- Example #33 (snippet separator from the scrape; commented so the file parses) ---
class TestSlurmSingularityRunner(unittest.TestCase):
    """Tests for the singularity engine when popper runs under the slurm
    resource manager: container commands are wrapped in sbatch scripts."""

    def setUp(self):
        # Silence popper's logger during the test and intercept the Popen
        # used by the host runner so no real processes are spawned.
        # Fix: previously this class never raised the level here even
        # though tearDown resets it to NOTSET; the sibling TestSlurm*Runner
        # classes all set CRITICAL in setUp, so do the same for consistency.
        log.setLevel("CRITICAL")
        self.Popen = MockPopen()
        replacer = Replacer()
        replacer.replace("popper.runner_host.Popen", self.Popen)
        self.addCleanup(replacer.restore)

    def tearDown(self):
        # Restore default logging behaviour.
        log.setLevel("NOTSET")

    def test_create_cmd(self):
        """_create_cmd builds the `singularity run` command line, honouring
        engine options (bind mounts, hostname, ipc) from the config file."""
        config = ConfigLoader.load(workspace_dir="/w")
        with SingularityRunner(config=config) as sr:
            step = Box({"args": ["-two", "-flags"]}, default_box=True)
            sr._setup_singularity_cache()
            sr._container = os.path.join(sr._singularity_cache, "c1.sif")
            cmd = sr._create_cmd(step, "c1.sif")

            expected = (
                "singularity run"
                " --userns --pwd /workspace"
                " --bind /w:/workspace"
                f' {os.environ["HOME"]}/.cache/popper/singularity/{config.wid}/c1.sif'
                " -two -flags")

            self.assertEqual(expected, cmd)

        # Second pass: engine options must appear in the generated command.
        config_dict = {
            "engine": {
                "name": "singularity",
                "options": {
                    "hostname": "popper.local",
                    "ipc": True,
                    "bind": ["/path/in/host:/path/in/container"],
                },
            },
            "resource_manager": {
                "name": "slurm"
            },
        }

        config = ConfigLoader.load(workspace_dir="/w", config_file=config_dict)

        with SingularityRunner(config=config) as sr:
            step = Box({"args": ["-two", "-flags"]}, default_box=True)
            sr._setup_singularity_cache()
            sr._container = os.path.join(sr._singularity_cache, "c2.sif")
            cmd = sr._create_cmd(step, "c2.sif")

            # fmt: off
            expected = f"singularity run --userns --pwd /workspace --bind /w:/workspace --bind /path/in/host:/path/in/container --hostname popper.local --ipc {os.environ['HOME']}/.cache/popper/singularity/{config.wid}/c2.sif -two -flags"
            # fmt: on

            self.assertEqual(expected, cmd)

    @replace("popper.runner_slurm.os.kill", mock_kill)
    def test_slurm_singularity_run(self, mock_kill):
        """Run a one-step workflow end-to-end and verify the generated
        sbatch script contains the expected singularity invocation."""
        config_dict = {
            "engine": {
                "name": "singularity",
                "options": {
                    "hostname": "popper.local",
                    "bind": ["/path/in/host:/path/in/container"],
                },
            },
            "resource_manager": {
                "name": "slurm"
            },
        }

        config = ConfigLoader.load(workspace_dir="/w", config_file=config_dict)

        # Register the exact command strings the runner is expected to spawn.
        # fmt: off
        self.Popen.set_command(
            f"sbatch --wait --job-name popper_1_{config.wid} --output /tmp/popper/slurm/popper_1_{config.wid}.out /tmp/popper/slurm/popper_1_{config.wid}.sh",
            returncode=0,
        )
        # fmt: on

        self.Popen.set_command(
            f"tail -f /tmp/popper/slurm/popper_1_{config.wid}.out",
            returncode=0)

        with WorkflowRunner(config) as r:
            wf_data = {
                "steps": [{
                    "uses": "popperized/bin/sh@master",
                    "args": ["ls"],
                }]
            }
            r.run(WorkflowParser.parse(wf_data=wf_data))

        with open(f"/tmp/popper/slurm/popper_1_{config.wid}.sh", "r") as f:
            # fmt: off
            expected = f"""#!/bin/bash
singularity run --userns --pwd /workspace --bind /w:/workspace --bind /path/in/host:/path/in/container --hostname popper.local {os.environ['HOME']}/.cache/popper/singularity/{config.wid}/popper_1_{config.wid}.sif ls"""
            # fmt: on
            actual = f.read()
        self.assertEqual(expected, actual)
# --- Example #34 (snippet separator from the scrape; commented so the file parses) ---
 def setUp(self):
     """Install a MockPopen in place of the object named by ``dotted_path``.

     The replacement is undone automatically once the test finishes.
     """
     mock_popen = MockPopen()
     replacer = Replacer()
     replacer.replace(dotted_path, mock_popen)
     self.Popen = mock_popen
     self.r = replacer
     self.addCleanup(self.r.restore)
# --- Example #35 (snippet separator from the scrape; commented so the file parses) ---
class TestSlurmSlurmRunner(PopperTest):
    """Unit tests for SlurmRunner with subprocess.Popen mocked out.

    No real Slurm installation is needed: MockPopen intercepts every
    command (sbatch, tail, scancel) the runner would otherwise spawn.
    """

    def setUp(self):
        """Silence logging, install MockPopen, and create a scratch repo dir."""
        log.setLevel('CRITICAL')
        self.Popen = MockPopen()
        replacer = Replacer()
        replacer.replace('popper.runner_host.Popen', self.Popen)
        self.addCleanup(replacer.restore)
        self.repo = tempfile.mkdtemp()

    def tearDown(self):
        """Restore the default logging level."""
        log.setLevel('NOTSET')

    def test_tail_output(self):
        """_tail_output returns the tail exit code and records its pid."""
        self.Popen.set_command('tail -f slurm-x.out', returncode=0)
        with SlurmRunner(config=PopperConfig()) as sr:
            self.assertEqual(sr._tail_output('slurm-x.out'), 0)
            self.assertEqual(len(sr._out_stream_pid), 1)

    def test_stop_running_tasks(self):
        """stop_running_tasks issues scancel for each spawned job name."""
        self.Popen.set_command('scancel --name job_a', returncode=0)
        with SlurmRunner(config=PopperConfig()) as sr:
            sr._spawned_jobs.add('job_a')
            sr.stop_running_tasks()
            # stderr=-2 / stdout=-1 are subprocess.STDOUT / subprocess.PIPE.
            self.assertEqual(
                call.Popen(['scancel', '--name', 'job_a'],
                           cwd=os.getcwd(),
                           env=None,
                           preexec_fn=os.setsid,
                           stderr=-2,
                           stdout=-1,
                           universal_newlines=True) in self.Popen.all_calls,
                True)

    @replace('popper.runner_slurm.os.kill', mock_kill)
    def test_submit_batch_job(self, mock_kill):
        """_submit_batch_job writes the batch script and runs sbatch + tail."""
        self.Popen.set_command(
            'sbatch --wait '
            '--job-name popper_sample_123abc '
            '--output /tmp/popper/slurm/popper_sample_123abc.out '
            '/tmp/popper/slurm/popper_sample_123abc.sh',
            returncode=0)
        self.Popen.set_command(
            'tail -f /tmp/popper/slurm/popper_sample_123abc.out', returncode=0)
        config = PopperConfig(workspace_dir='/w')
        config.wid = "123abc"
        step = {"name": "sample"}
        with SlurmRunner(config=config) as sr:
            sr._submit_batch_job(["ls -la"], step)
            with open("/tmp/popper/slurm/popper_sample_123abc.sh", 'r') as f:
                content = f.read()

            self.assertEqual(content, "#!/bin/bash\nls -la")
            # After a successful run nothing should be left pending.
            self.assertEqual(len(sr._spawned_jobs), 0)
            self.assertEqual(sr._out_stream_thread.is_alive(), False)

        # Expected Popen invocations (stderr=-2 / stdout=-1 are
        # subprocess.STDOUT / subprocess.PIPE).
        call_tail = call.Popen(
            ['tail', '-f', '/tmp/popper/slurm/popper_sample_123abc.out'],
            cwd=os.getcwd(),
            env=None,
            preexec_fn=os.setsid,
            stderr=-2,
            stdout=-1,
            universal_newlines=True)

        call_sbatch = call.Popen([
            'sbatch', '--wait', '--job-name', 'popper_sample_123abc',
            '--output', '/tmp/popper/slurm/popper_sample_123abc.out',
            '/tmp/popper/slurm/popper_sample_123abc.sh'
        ],
                                 cwd=os.getcwd(),
                                 env=None,
                                 preexec_fn=os.setsid,
                                 stderr=-2,
                                 stdout=-1,
                                 universal_newlines=True)

        self.assertEqual(call_tail in self.Popen.all_calls, True)
        self.assertEqual(call_sbatch in self.Popen.all_calls, True)

    @replace('popper.runner_slurm.os.kill', mock_kill)
    def test_submit_job_failure(self, mock_kill):
        """A non-zero sbatch exit code must make the workflow run exit."""
        self.Popen.set_command(
            'sbatch --wait --job-name popper_1_123abc '
            '--output /tmp/popper/slurm/popper_1_123abc.out '
            '/tmp/popper/slurm/popper_1_123abc.sh',
            returncode=12)

        self.Popen.set_command('tail -f /tmp/popper/slurm/popper_1_123abc.out',
                               returncode=0)

        config_dict = {
            'engine': {
                'name': 'docker',
                'options': {}
            },
            'resource_manager': {
                'name': 'slurm',
                'options': {}
            }
        }

        config = PopperConfig(workspace_dir='/w', config_file=config_dict)
        config.wid = "123abc"

        with WorkflowRunner(config) as r:
            wf = YMLWorkflow("""
            version: '1'
            steps:
            - uses: 'popperized/bin/sh@master'
              runs: [cat]
              args: README.md
            """)
            wf.parse()
            self.assertRaises(SystemExit, r.run, wf)

            # Even on failure both commands should have been attempted.
            call_tail = call.Popen(
                ['tail', '-f', '/tmp/popper/slurm/popper_1_123abc.out'],
                cwd=os.getcwd(),
                env=None,
                preexec_fn=os.setsid,
                stderr=-2,
                stdout=-1,
                universal_newlines=True)

            call_sbatch = call.Popen([
                'sbatch', '--wait', '--job-name', 'popper_1_123abc',
                '--output', '/tmp/popper/slurm/popper_1_123abc.out',
                '/tmp/popper/slurm/popper_1_123abc.sh'
            ],
                                     cwd=os.getcwd(),
                                     env=None,
                                     preexec_fn=os.setsid,
                                     stderr=-2,
                                     stdout=-1,
                                     universal_newlines=True)

            self.assertEqual(call_tail in self.Popen.all_calls, True)
            self.assertEqual(call_sbatch in self.Popen.all_calls, True)

    def test_dry_run(self):
        """With dry_run=True no process may be spawned at all."""
        repo = self.mk_repo()
        config = PopperConfig(engine_name='docker',
                              resman_name='slurm',
                              dry_run=True,
                              workspace_dir=repo.working_dir)

        with WorkflowRunner(config) as r:
            wf = YMLWorkflow("""
            version: '1'
            steps:
            - uses: 'popperized/bin/sh@master'
              runs: [cat]
              args: README.md
            """)
            wf.parse()
            r.run(wf)

        self.assertEqual(self.Popen.all_calls, [])
Example #36
0
class TestSlurmSingularityRunner(unittest.TestCase):
    """Tests for the SingularityRunner command construction and the
    slurm resource-manager integration, with Popen mocked out."""

    def setUp(self):
        """Install MockPopen in place of popper.runner_host.Popen."""
        self.Popen = MockPopen()
        replacer = Replacer()
        replacer.replace("popper.runner_host.Popen", self.Popen)
        self.addCleanup(replacer.restore)

    def tearDown(self):
        """Restore the default logging level."""
        log.setLevel("NOTSET")

    def test_create_cmd(self):
        """_create_cmd builds the singularity CLI from config options."""
        # Default config: only the workspace bind is added.
        config = ConfigLoader.load(workspace_dir="/w")
        with SingularityRunner(config=config) as sr:
            step = Box({"args": ["-two", "-flags"]}, default_box=True)
            sr._container = "c1.sif"
            cmd = sr._create_cmd(step, "c1.sif")

            expected = ("singularity run"
                        " --userns --pwd /workspace"
                        " --bind /w:/workspace"
                        " c1.sif"
                        " -two -flags")

            self.assertEqual(expected.split(" "), cmd)

        # Engine options (hostname, ipc, extra binds) must be reflected
        # in the generated command line.
        config_dict = {
            "engine": {
                "name": "singularity",
                "options": {
                    "hostname": "popper.local",
                    "ipc": True,
                    "bind": ["/path/in/host:/path/in/container"],
                },
            },
            "resource_manager": {
                "name": "slurm"
            },
        }

        config = ConfigLoader.load(workspace_dir="/w", config_file=config_dict)

        with SingularityRunner(config=config) as sr:
            step = Box({"args": ["-two", "-flags"]}, default_box=True)
            sr._container = "c2.sif"
            cmd = sr._create_cmd(step, "c2.sif")

            # fmt: off
            expected = f"singularity run --userns --pwd /workspace --bind /w:/workspace --bind /path/in/host:/path/in/container --hostname popper.local --ipc c2.sif -two -flags"
            # fmt: on
            self.assertEqual(expected.split(" "), cmd)

    @replace("popper.runner_slurm.os.kill", mock_kill)
    def test_run(self, mock_kill):
        """End-to-end run under slurm: with mpi (default) the batch script
        gets #SBATCH headers + mpirun; with mpi disabled each command is
        executed through srun instead."""
        self.maxDiff = None
        config_dict = {
            "engine": {
                "name": "singularity",
                "options": {
                    "hostname": "popper.local",
                    "bind": ["/path/in/host:/path/in/container"],
                },
            },
            "resource_manager": {
                "name": "slurm",
                "options": {
                    "1": {
                        "nodes": 2,
                        "ntasks": 2,
                        "nodelist": "worker1,worker2"
                    }
                },
            },
        }

        config = ConfigLoader.load(workspace_dir="/w", config_file=config_dict)

        # fmt: off
        self.Popen.set_command(
            "sbatch "
            "--wait "
            f"popper_1_{config.wid}.sh",
            returncode=0,
        )
        # fmt: on

        self.Popen.set_command(f"tail -f popper_1_{config.wid}.out",
                               returncode=0)

        with WorkflowRunner(config) as r:
            wf_data = {"steps": [{"uses": "docker://alpine", "args": ["ls"]}]}
            r.run(WorkflowParser.parse(wf_data=wf_data))

        # Verify the exact batch script written by the runner.
        with open(f"popper_1_{config.wid}.sh", "r") as f:
            # fmt: off
            expected = f"""#!/bin/bash
#SBATCH --job-name=popper_1_{config.wid}
#SBATCH --output=popper_1_{config.wid}.out
#SBATCH --nodes=2
#SBATCH --ntasks=2
#SBATCH --ntasks-per-node=1
#SBATCH --nodelist=worker1,worker2
mpirun singularity run --userns --pwd /workspace --bind /w:/workspace --bind /path/in/host:/path/in/container --hostname popper.local {os.environ['HOME']}/.cache/popper/singularity/{config.wid}/popper_1_{config.wid}.sif ls"""
            # fmt: on
            actual = f.read()
            self.assertEqual(expected, actual)

        # Same workflow with "mpi": False — srun is used directly.
        config_dict = {
            "engine": {
                "name": "singularity",
                "options": {
                    "hostname": "popper.local",
                    "bind": ["/path/in/host:/path/in/container"],
                },
            },
            "resource_manager": {
                "name": "slurm",
                "options": {
                    "1": {
                        "mpi": False,
                        "nodes": 2,
                        "ntasks": 2,
                        "nodelist": "worker1,worker2",
                    }
                },
            },
        }

        config = ConfigLoader.load(workspace_dir="/w", config_file=config_dict)

        self.Popen.set_command(
            f"srun --nodes 2 --ntasks 2 --ntasks-per-node 1 --nodelist worker1,worker2 rm -rf popper_1_{config.wid}.sif",
            returncode=0,
        )

        self.Popen.set_command(
            f"srun --nodes 2 --ntasks 2 --ntasks-per-node 1 --nodelist worker1,worker2 singularity pull docker://alpine:latest",
            returncode=0,
        )

        self.Popen.set_command(
            f"srun --nodes 2 --ntasks 2 --ntasks-per-node 1 --nodelist worker1,worker2 singularity run --userns --pwd /workspace --bind /w:/workspace --bind /path/in/host:/path/in/container --hostname popper.local {os.environ['HOME']}/.cache/popper/singularity/{config.wid}/popper_1_{config.wid}.sif ls",
            returncode=0,
        )

        with WorkflowRunner(config) as r:
            wf_data = {"steps": [{"uses": "docker://alpine", "args": ["ls"]}]}
            r.run(WorkflowParser.parse(wf_data=wf_data))
Example #37
0
def mock_popen(monkeypatch):
    """Pytest fixture: swap apployer.cf_cli.Popen for a MockPopen.

    Yields the mock so tests can register commands and inspect calls;
    after the test it verifies the code under test actually used it.
    """
    fake_popen = MockPopen()
    monkeypatch.setattr('apployer.cf_cli.Popen', fake_popen)
    yield fake_popen
    # Sanity check: the mocked Popen must have been invoked by the test.
    assert fake_popen.mock.method_calls
Example #38
0
class CppInsightsTestCase(unittest.TestCase):
    """Flask-client tests for the cppinsights web service.

    subprocess.Popen is replaced by MockPopen (so no docker is run) and
    tempfile.mkstemp is replaced so the source file lands at a fixed path
    that the mocked docker command lines can reference.
    """

    def mock_mkstemp(self, suffix=None, prefix=None, dir=None, text=False):
        """Stand-in for tempfile.mkstemp returning a fixed /tmp path.

        NOTE(review): the file object is kept on self.fd and never closed
        (tearDown is commented out below) — potential fd leak across tests.
        """
        self.fd = open('/tmp/pyt.cpp', "w+")
        return [self.fd.fileno(), '/tmp/pyt.cpp']
    #------------------------------------------------------------------------------

    def setUp(self):
        """Create a test client and install the Popen/mkstemp mocks."""
        self.app = app.test_client()
        self.Popen = MockPopen()
        self.r = Replacer()
        self.r.replace('subprocess.Popen', self.Popen)
        self.r.replace('tempfile.mkstemp', self.mock_mkstemp)
        self.addCleanup(self.r.restore)
    #------------------------------------------------------------------------------


#    def tearDown(self):
#        if hasattr(self, 'fd'):
#            self.fd.close()
    #------------------------------------------------------------------------------

    def test_access_root(self):
        """The index page must be reachable."""
        rv = self.app.get('/')
        assert 200 == rv.status_code
    #------------------------------------------------------------------------------

    def test_cpp_options_order_root(self):
        """The option/optgroup entries on the index page appear in the
        expected order with the expected values."""
        rv = self.app.get('/')
        data = rv.data.decode("utf-8").splitlines()
        opts = [ 'C++ Standard',
                 'cpp98',
                 'cpp11',
                 'cpp14',
                 'cpp17',
                 'cpp2a',
                 'Alternative Styles',
                 'alt-syntax-for',
                 'alt-syntax-subscription',
                 'More Transformations',
#                 'stdinitlist',
                 'all-implicit-casts',
                ]

        # NOTE(review): [value|label] is a character class (any one of the
        # chars v,a,l,u,e,|,b), not the alternation (?:value|label). It
        # happens to match here because both attribute names end in chars
        # from the class — confirm before tightening the pattern.
        regEx = re.compile(r'[value|label]="(.*?)"' )
        regExGroup = re.compile(r'label="(.*?)"' ) # optgroup label=
        options = []
        for line in data:
            line = line.strip()
            # Only <option .../<optgroup ...> lines carry the values we check.
            if not line.startswith('<option') and not line.startswith('<optgroup'):
                continue

            m = regEx.search(line)
            if None != m:
                options.append(m.group(1))


        assert opts == options
        assert 200 == rv.status_code
    #------------------------------------------------------------------------------

    def test_invalid_site(self):
        """Unknown paths must render the 'Page Not Found' page."""
        rv = self.app.get('/aa')
        assert b'Page Not Found' in rv.data
    #------------------------------------------------------------------------------

    def test_invalid_site_with_post(self):
        """POST to a non-POST endpoint must be rejected with 405."""
        rv = self.app.post('/test_function',
                       data=json.dumps(dict(foo='bar')),
                       content_type='application/json')

        assert 405 == rv.status_code
    #------------------------------------------------------------------------------

    def test_request_api_v1_tranform_valid(self):
        """A valid transform request runs insights in docker and returns
        its stdout with return code 0."""
        self.Popen.set_command('sudo -u pfes docker run --net=none -v /tmp/pyt.cpp:/home/insights/insights.cpp --rm -i insights-test -- -std=c++98', stdout=b'o', stderr=b'', returncode=0)

        rv = self.app.post('/api/v1/transform',
                       data=json.dumps(dict(insightsOptions=['cpp98'], code='hello')),
                       content_type='application/json')

        data = json.loads(rv.data.decode('utf-8'))
        self.assertTrue(data['returncode'] == 0)
        self.assertTrue(data['stderr'] == 'Insights exited with result code: 0')
        self.assertTrue(data['stdout'] == 'o')
        assert 200 == rv.status_code
    #------------------------------------------------------------------------------

    def test_request_api_v1_tranform_valid_with_result_1(self):
        """A failing insights run (returncode 1) reports a compile failure."""
        self.Popen.set_command('sudo -u pfes docker run --net=none -v /tmp/pyt.cpp:/home/insights/insights.cpp --rm -i insights-test -- -std=c++98', stdout=b'o', stderr=b'', returncode=1)

        rv = self.app.post('/api/v1/transform',
                       data=json.dumps(dict(insightsOptions=['cpp98'], code='hello')),
                       content_type='application/json')

        data = json.loads(rv.data.decode('utf-8'))
        self.assertTrue(data['returncode'] == 1)
        self.assertTrue(data['stderr'] == 'Insights exited with result code: 1')
        self.assertTrue(data['stdout'] == 'Compilation failed!')
        assert b'Compilation failed!' in data['stdout'].encode()
        assert 200 == rv.status_code
    #------------------------------------------------------------------------------

    def test_request_api_v1_tranform_valid_with_result_and_insights_args_1(self):
        """Extra insights options are passed through before the -- separator."""
        self.Popen.set_command('sudo -u pfes docker run --net=none -v /tmp/pyt.cpp:/home/insights/insights.cpp --rm -i insights-test -alt-syntax-for -- -std=c++98', stdout=b'o', stderr=b'', returncode=0)

        rv = self.app.post('/api/v1/transform',
                       data=json.dumps(dict(insightsOptions=['alt-syntax-for','cpp98'], code='hello')),
                       content_type='application/json')

        data = json.loads(rv.data.decode('utf-8'))
        self.assertTrue(data['returncode'] == 0)
        self.assertTrue(data['stderr'] == 'Insights exited with result code: 0')
        self.assertTrue(data['stdout'] == 'o')
        assert 200 == rv.status_code
    #------------------------------------------------------------------------------

    def test_request_api_v1_tranform_valid_with_warnings(self):
        """stderr output from a successful run is forwarded verbatim."""
        self.Popen.set_command('sudo -u pfes docker run --net=none -v /tmp/pyt.cpp:/home/insights/insights.cpp --rm -i insights-test -- -std=c++98', stdout=b'o', stderr=b'Warning: unused var', returncode=0)

        rv = self.app.post('/api/v1/transform',
                       data=json.dumps(dict(insightsOptions=['cpp98'], code='hello')),
                       content_type='application/json')

        data = json.loads(rv.data.decode('utf-8'))
        self.assertTrue(data['returncode'] == 0)
        self.assertTrue(data['stderr'] == 'Warning: unused var')
        self.assertTrue(data['stdout'] == 'o')
        assert 200 == rv.status_code
    #------------------------------------------------------------------------------

    def test_request_api_v1_tranform_invalid_std(self):
        """An unknown standard (cpp12) falls back to the default -std=c++17."""
        self.Popen.set_command('sudo -u pfes docker run --net=none -v /tmp/pyt.cpp:/home/insights/insights.cpp --rm -i insights-test -- -std=c++17', stdout=b'o', stderr=b'',returncode=0)

        rv = self.app.post('/api/v1/transform',
                       data=json.dumps(dict(insightsOptions=['cpp12'], code='hello')),
                       content_type='application/json')

        data = json.loads(rv.data.decode('utf-8'))
        self.assertTrue(data['returncode'] == 0)
        self.assertTrue(data['stderr'] == 'Insights exited with result code: 0')
        self.assertTrue(data['stdout'] == 'o')
        assert 200 == rv.status_code
    #------------------------------------------------------------------------------

    def test_request_api_v1_version(self):
        """The version API runs insights without a source file."""
        self.Popen.set_command('sudo -u pfes docker run --net=none --rm -i insights-test', stdout=b'o', stderr=b'', returncode=0)

        rv = self.app.get('/api/v1/version',
                       content_type='application/json')

        data = json.loads(rv.data.decode('utf-8'))
        self.assertTrue(data['returncode'] == 0)
        self.assertTrue(data['stderr'] == 'Insights exited with result code: 0')
        self.assertTrue(data['stdout'] == 'o')
        assert 200 == rv.status_code
    #------------------------------------------------------------------------------

    def test_request_api_v1_version_invalid(self):
        """A failing version query reports a compilation failure."""
        self.Popen.set_command('sudo -u pfes docker run --net=none --rm -i insights-test', stdout=b'o', stderr=b'', returncode=1)

        rv = self.app.get('/api/v1/version',
                       content_type='application/json')

        data = json.loads(rv.data.decode('utf-8'))
        self.assertTrue(data['returncode'] == 1)
        self.assertTrue(data['stderr'] == 'Insights exited with result code: 1')
        assert b'Compilation failed!' in data['stdout'].encode()
        assert 200 == rv.status_code
    #------------------------------------------------------------------------------

    def test_request_version(self):
        """The HTML version page embeds the docker command's stdout."""
        self.Popen.set_command('sudo -u pfes docker run --net=none --rm -i insights-test', stdout=b'fake version info from docker', stderr=b'', returncode=0)

        rv = self.app.get('/version')

        assert b'fake version info from docker' in rv.data
        assert 200 == rv.status_code
    #------------------------------------------------------------------------------

    def selectedStandard(self, cppStd, text):
        """Return the expected <option> markup for the selected C++ standard."""
        return ('<option value="%s" class="single"  selected="selected" >%s</option>' %(cppStd, text)).encode()
    #------------------------------------------------------------------------------

    def test_link_rev_1_valid(self):
        """A rev 1.0 share link preselects the std given in the URL."""
        rv = self.app.post('/lnk?code=I2luY2x1ZGUgPGNzdGRpbz4KdGVtcGxhdGU8dHlwZW5hbWUgVT4KY2xhc3MgWAp7CnB1YmxpYzoKICAgIFgoKSAgICAgICAgICAgPSBkZWZhdWx0OwogICAgWChjb25zdCBYJiB4KSA9IGRlZmF1bHQ7CgogICAgdGVtcGxhdGU8dHlwZW5hbWUgVD4KICAgIFgoVCYmIHgpCiAgICA6IG1Ye30KICAgIHsgfQoKcHJpdmF0ZToKICAgIFUgbVg7Cn07CgppbnQgbWFpbigpCnsKICAgIFg8aW50PiBhcnJbMl17fTsKCiAgICBmb3IoY29uc3QgWDxjb25zdCBpbnQ+JiB4IDogYXJyKSB7IH0KfQ==&std=cpp11&rev=1.0',follow_redirects=True)
        assert self.selectedStandard('cpp11', 'C++ 11') in rv.data
        assert 200 == rv.status_code
    #------------------------------------------------------------------------------

    def test_link_rev_1_invalid_std(self):
        """An unknown std in a share link falls back to C++ 17."""
        rv = self.app.post('/lnk?code=I2luY2x1ZGUgPGNzdGRpbz4KdGVtcGxhdGU8dHlwZW5hbWUgVT4KY2xhc3MgWAp7CnB1YmxpYzoKICAgIFgoKSAgICAgICAgICAgPSBkZWZhdWx0OwogICAgWChjb25zdCBYJiB4KSA9IGRlZmF1bHQ7CgogICAgdGVtcGxhdGU8dHlwZW5hbWUgVD4KICAgIFgoVCYmIHgpCiAgICA6IG1Ye30KICAgIHsgfQoKcHJpdmF0ZToKICAgIFUgbVg7Cn07CgppbnQgbWFpbigpCnsKICAgIFg8aW50PiBhcnJbMl17fTsKCiAgICBmb3IoY29uc3QgWDxjb25zdCBpbnQ+JiB4IDogYXJyKSB7IH0KfQ==&std=cpp12&rev=1.0',follow_redirects=True)
        assert self.selectedStandard('cpp17', 'C++ 17') in rv.data
        assert 200 == rv.status_code
    #------------------------------------------------------------------------------

    def test_link_rev_1_missing_std(self):
        """A share link without a std parameter falls back to C++ 17."""
        rv = self.app.post('/lnk?code=I2luY2x1ZGUgPGNzdGRpbz4KdGVtcGxhdGU8dHlwZW5hbWUgVT4KY2xhc3MgWAp7CnB1YmxpYzoKICAgIFgoKSAgICAgICAgICAgPSBkZWZhdWx0OwogICAgWChjb25zdCBYJiB4KSA9IGRlZmF1bHQ7CgogICAgdGVtcGxhdGU8dHlwZW5hbWUgVD4KICAgIFgoVCYmIHgpCiAgICA6IG1Ye30KICAgIHsgfQoKcHJpdmF0ZToKICAgIFUgbVg7Cn07CgppbnQgbWFpbigpCnsKICAgIFg8aW50PiBhcnJbMl17fTsKCiAgICBmb3IoY29uc3QgWDxjb25zdCBpbnQ+JiB4IDogYXJyKSB7IH0KfQ==&rev=1.0',follow_redirects=True)
        assert self.selectedStandard('cpp17', 'C++ 17') in rv.data
        assert 200 == rv.status_code
    #------------------------------------------------------------------------------

    def test_link_rev_1_missing_rev(self):
        """A share link without a rev parameter is rejected with 404."""
        rv = self.app.post('/lnk?code=I2luY2x1ZGUgPGNzdGRpbz4KdGVtcGxhdGU8dHlwZW5hbWUgVT4KY2xhc3MgWAp7CnB1YmxpYzoKICAgIFgoKSAgICAgICAgICAgPSBkZWZhdWx0OwogICAgWChjb25zdCBYJiB4KSA9IGRlZmF1bHQ7CgogICAgdGVtcGxhdGU8dHlwZW5hbWUgVD4KICAgIFgoVCYmIHgpCiAgICA6IG1Ye30KICAgIHsgfQoKcHJpdmF0ZToKICAgIFUgbVg7Cn07CgppbnQgbWFpbigpCnsKICAgIFg8aW50PiBhcnJbMl17fTsKCiAgICBmb3IoY29uc3QgWDxjb25zdCBpbnQ+JiB4IDogYXJyKSB7IH0KfQ==&std=cpp11',follow_redirects=True)
        assert self.selectedStandard('cpp17', 'C++ 17') in rv.data
        assert b'The revision of the link is invalid.' in rv.data
        assert 404 == rv.status_code
    #------------------------------------------------------------------------------

    def test_link_rev_1_invalid_rev(self):
        """A share link with an unsupported rev is rejected with 404."""
        rv = self.app.post('/lnk?code=I2luY2x1ZGUgPGNzdGRpbz4KdGVtcGxhdGU8dHlwZW5hbWUgVT4KY2xhc3MgWAp7CnB1YmxpYzoKICAgIFgoKSAgICAgICAgICAgPSBkZWZhdWx0OwogICAgWChjb25zdCBYJiB4KSA9IGRlZmF1bHQ7CgogICAgdGVtcGxhdGU8dHlwZW5hbWUgVD4KICAgIFgoVCYmIHgpCiAgICA6IG1Ye30KICAgIHsgfQoKcHJpdmF0ZToKICAgIFUgbVg7Cn07CgppbnQgbWFpbigpCnsKICAgIFg8aW50PiBhcnJbMl17fTsKCiAgICBmb3IoY29uc3QgWDxjb25zdCBpbnQ+JiB4IDogYXJyKSB7IH0KfQ==&std=cpp11&rev=22',follow_redirects=True)
        assert self.selectedStandard('cpp17', 'C++ 17') in rv.data
        assert b'The revision of the link is invalid.' in rv.data
        assert 404 == rv.status_code
    #------------------------------------------------------------------------------

    def test_link_rev_1_invalid_base64(self):
        """Invalid base64 in the code parameter is tolerated (std still honored)."""
        rv = self.app.post('/lnk?code=I2luY2x1ZGUgPGNzdGRpbz4KdGVtcGxhdGU8dHlwZW5hbWUgVT4KY2xhc3MgWAp7CnB1YmxpYzoKICAgIFgoKSAgICAgICAgICAgPSBkZWZhdWx0OwogICAgWChjb25zdCBYJiB4KSA9IGRlZmF1bHQ7CgogICAgdGVtcGxhdGU8dHlwZW5hbWUgVD4KICAgIFgoVCYmIHgpCiAgICA6IG1Ye30KICAgIHsgfQoKcHJpdmF0ZToKICAgIFUgbVg7Cn07CgppbnQgbWFpbigpCnsKICAgIFg8aW50PiBhcnJbMl17fTsKCiAgICBmb3IoY29uc3QgWDxjb25zdCBpbnQAAAAAAAAAAJiB4IDogYXJyKSB7IH0KfQ==&std=cpp11&rev=1.0',follow_redirects=True)
        assert self.selectedStandard('cpp11', 'C++ 11') in rv.data
        assert 200 == rv.status_code
Example #39
0
 def test_command_not_specified(self):
     """MockPopen must raise KeyError when asked to run a command that
     was never registered via set_command()."""
     Popen = MockPopen()
     with ShouldRaise(
             KeyError("Nothing specified for command 'a command'")):
         Popen('a command', stdout=PIPE, stderr=PIPE, shell=True)
Example #40
0
 def setUp(self):
     """Create a fresh MockPopen for each test."""
     self.Popen = MockPopen()
Example #41
0
class CppInsightsTestCase(unittest.TestCase):
    def mock_mkstemp(self, suffix=None, prefix=None, dir=None, text=False):
        """Stand-in for tempfile.mkstemp returning a fixed /tmp path.

        NOTE(review): the file object is kept on self.fd and never closed
        (tearDown is commented out) — potential fd leak across tests.
        """
        self.fd = open('/tmp/pyt.cpp', "w+")
        return [self.fd.fileno(), '/tmp/pyt.cpp']

    #------------------------------------------------------------------------------

    @staticmethod
    def removeDbTestFile(cls):
        """Delete the test URL database if a previous run left it behind."""
        db_file = cls.getDbNameMock()
        if os.path.exists(db_file):
            os.remove(db_file)

    #------------------------------------------------------------------------------

    @staticmethod
    def getDbNameMock():
        """Name of the throwaway sqlite file used instead of the real DB."""
        return 'urls_test.db'

    #------------------------------------------------------------------------------

    @staticmethod
    def removeCommunityEventTestFile(cls):
        """Delete the community-event test file if it exists."""
        event_file = cls.getCommunityEventFileNameMock()
        if os.path.exists(event_file):
            os.remove(event_file)

    #------------------------------------------------------------------------------

    @staticmethod
    def getCommunityEventFileNameMock():
        """Name of the throwaway community-event file used by the tests."""
        return 'communityevent_test.txt'

    #------------------------------------------------------------------------------

    @classmethod
    def setUpClass(cls):
        """Start from a clean slate: remove leftover test files.

        NOTE(review): cls is passed explicitly because the remove helpers
        are @staticmethod with a ``cls`` parameter — works, but unusual.
        """
        cls.removeDbTestFile(cls)
        cls.removeCommunityEventTestFile(cls)

    #------------------------------------------------------------------------------

    def setUp(self):
        """Create a test client and mock out Popen, mkstemp, and the
        app's DB/community-event file names."""
        self.app = app.test_client()
        self.Popen = MockPopen()
        self.r = Replacer()
        self.r.replace('subprocess.Popen', self.Popen)
        self.r.replace('tempfile.mkstemp', self.mock_mkstemp)
        self.r.replace('app.getDbName', self.getDbNameMock)
        self.r.replace('app.getCommunityEventFileName',
                       self.getCommunityEventFileNameMock)
        self.addCleanup(self.r.restore)

    #------------------------------------------------------------------------------

    @classmethod
    def tearDownClass(cls):
        """Remove the test DB (the community-event file is only cleaned
        up in setUpClass of the next run)."""
        cls.removeDbTestFile(cls)

    #------------------------------------------------------------------------------

#    def tearDown(self):
##        if None != self.fd:
#            self.fd.close()
#        if hasattr(self, 'fd'):
#            self.fd.close()
#------------------------------------------------------------------------------

    def test_access_root(self):
        """The index page must be reachable."""
        response = self.app.get('/')
        assert response.status_code == 200

    #------------------------------------------------------------------------------

    def test_cpp_options_order_root(self):
        """The option/optgroup entries on the index page appear in the
        expected order with the expected values (font selectors skipped)."""
        rv = self.app.get('/')
        data = rv.data.decode("utf-8").splitlines()
        opts = [
            'C++ Standard',
            'cpp98',
            'cpp11',
            'cpp14',
            'cpp17',
            'cpp2a',
            'Alternative Styles',
            'alt-syntax-for',
            'alt-syntax-subscription',
            'More Transformations',
            'all-implicit-casts',
            'use-libcpp',
            'edu-show-initlist',
        ]

        # NOTE(review): [value|label] is a character class (any one of the
        # chars v,a,l,u,e,|,b), not the alternation (?:value|label). It
        # happens to match here because both attribute names end in chars
        # from the class — confirm before tightening the pattern.
        regEx = re.compile(r'[value|label]="(.*?)"')
        regExGroup = re.compile(r'label="(.*?)"')  # optgroup label=
        options = []
        for line in data:
            line = line.strip()
            if not line.startswith('<option') and not line.startswith(
                    '<optgroup'):
                continue

            # Font-selector options are not part of the transform options.
            if -1 != line.find('class="fonts"'):
                continue

            m = regEx.search(line)
            if None != m:
                options.append(m.group(1))

        assert opts == options
        assert 200 == rv.status_code

    #------------------------------------------------------------------------------

    def test_invalid_site(self):
        """Unknown paths must render the 'Page Not Found' page."""
        response = self.app.get('/aa')
        assert b'Page Not Found' in response.data

    #------------------------------------------------------------------------------

    def test_invalid_site_with_post(self):
        """POST to a GET-only endpoint must be rejected with 405."""
        payload = json.dumps(dict(foo='bar'))
        response = self.app.post('/test_function',
                                 data=payload,
                                 content_type='application/json')

        assert response.status_code == 405

    #------------------------------------------------------------------------------

    def test_request_api_v1_tranform_valid(self):
        """A valid transform request runs insights in docker and returns
        its stdout with return code 0."""
        self.Popen.set_command(
            'sudo -u pfes docker run --net=none -v /tmp/pyt.cpp:/home/insights/insights.cpp --rm -i insights-test -- -std=c++98',
            stdout=b'o',
            stderr=b'',
            returncode=0)

        rv = self.app.post('/api/v1/transform',
                           data=json.dumps(
                               dict(insightsOptions=['cpp98'], code='hello')),
                           content_type='application/json')

        data = json.loads(rv.data.decode('utf-8'))
        self.assertTrue(data['returncode'] == 0)
        self.assertTrue(
            data['stderr'] == 'Insights exited with result code: 0')
        self.assertTrue(data['stdout'] == 'o')
        assert 200 == rv.status_code

    #------------------------------------------------------------------------------

    def test_request_api_v1_tranform_valid_with_result_1(self):
        """A failing insights run (returncode 1) reports a compile failure."""
        self.Popen.set_command(
            'sudo -u pfes docker run --net=none -v /tmp/pyt.cpp:/home/insights/insights.cpp --rm -i insights-test -- -std=c++98',
            stdout=b'o',
            stderr=b'',
            returncode=1)

        rv = self.app.post('/api/v1/transform',
                           data=json.dumps(
                               dict(insightsOptions=['cpp98'], code='hello')),
                           content_type='application/json')

        data = json.loads(rv.data.decode('utf-8'))
        self.assertTrue(data['returncode'] == 1)
        self.assertTrue(
            data['stderr'] == 'Insights exited with result code: 1')
        self.assertTrue(data['stdout'] == 'Compilation failed!')
        assert b'Compilation failed!' in data['stdout'].encode()
        assert 200 == rv.status_code

    #------------------------------------------------------------------------------

    def test_request_api_v1_tranform_valid_with_result_and_insights_args_1(
            self):
        """Extra insights options are passed through before the -- separator."""
        self.Popen.set_command(
            'sudo -u pfes docker run --net=none -v /tmp/pyt.cpp:/home/insights/insights.cpp --rm -i insights-test -alt-syntax-for -- -std=c++98',
            stdout=b'o',
            stderr=b'',
            returncode=0)

        rv = self.app.post('/api/v1/transform',
                           data=json.dumps(
                               dict(
                                   insightsOptions=['alt-syntax-for', 'cpp98'],
                                   code='hello')),
                           content_type='application/json')

        data = json.loads(rv.data.decode('utf-8'))
        self.assertTrue(data['returncode'] == 0)
        self.assertTrue(
            data['stderr'] == 'Insights exited with result code: 0')
        self.assertTrue(data['stdout'] == 'o')
        assert 200 == rv.status_code

    #------------------------------------------------------------------------------

    def test_request_api_v1_tranform_valid_with_warnings(self):
        """stderr output from a successful run is forwarded verbatim."""
        self.Popen.set_command(
            'sudo -u pfes docker run --net=none -v /tmp/pyt.cpp:/home/insights/insights.cpp --rm -i insights-test -- -std=c++98',
            stdout=b'o',
            stderr=b'Warning: unused var',
            returncode=0)

        rv = self.app.post('/api/v1/transform',
                           data=json.dumps(
                               dict(insightsOptions=['cpp98'], code='hello')),
                           content_type='application/json')

        data = json.loads(rv.data.decode('utf-8'))
        self.assertTrue(data['returncode'] == 0)
        self.assertTrue(data['stderr'] == 'Warning: unused var')
        self.assertTrue(data['stdout'] == 'o')
        assert 200 == rv.status_code

    #------------------------------------------------------------------------------

    def test_request_api_v1_tranform_invalid_std(self):
        """An unknown standard option falls back to the default (c++17)."""
        self.Popen.set_command(
            'sudo -u pfes docker run --net=none -v /tmp/pyt.cpp:/home/insights/insights.cpp --rm -i insights-test -- -std=c++17',
            stdout=b'o',
            stderr=b'',
            returncode=0)

        response = self.app.post(
            '/api/v1/transform',
            data=json.dumps({
                'insightsOptions': ['cpp12'],
                'code': 'hello'
            }),
            content_type='application/json')

        payload = json.loads(response.data.decode('utf-8'))
        self.assertEqual(0, payload['returncode'])
        self.assertEqual('Insights exited with result code: 0',
                         payload['stderr'])
        self.assertEqual('o', payload['stdout'])
        self.assertEqual(200, response.status_code)

    #------------------------------------------------------------------------------

    def test_request_api_v1_version(self):
        """/api/v1/version combines insights output and docker image info."""
        self.Popen.set_command(
            'sudo -u pfes docker run --net=none --rm -i insights-test --',
            stdout=b'o',
            stderr=b'',
            returncode=0)
        self.Popen.set_command(
            "docker images --filter=reference=insights-test --format '{{.ID}} {{.CreatedAt}}'",
            stdout=b'o',
            stderr=b'',
            returncode=0)
        self.Popen.set_command(
            'docker images --filter=reference=insights-test --format {{.ID}} {{.CreatedAt}}',
            stdout=b'o',
            stderr=b'',
            returncode=0)

        response = self.app.get('/api/v1/version',
                                content_type='application/json')

        payload = json.loads(response.data.decode('utf-8'))
        self.assertEqual(0, payload['returncode'])
        self.assertEqual('Insights exited with result code: 0',
                         payload['stderr'])
        self.assertEqual('o\nDocker image "insights-test" info: o\n',
                         payload['stdout'])
        self.assertEqual(200, response.status_code)

    #------------------------------------------------------------------------------

    def test_request_api_v1_version_invalid(self):
        """A failing insights invocation is reported with returncode 1."""
        self.Popen.set_command(
            'sudo -u pfes docker run --net=none --rm -i insights-test --',
            stdout=b'o',
            stderr=b'',
            returncode=1)
        self.Popen.set_command(
            "docker images --filter=reference=insights-test --format '{{.ID}} {{.CreatedAt}}'",
            stdout=b'o',
            stderr=b'',
            returncode=0)
        self.Popen.set_command(
            'docker images --filter=reference=insights-test --format {{.ID}} {{.CreatedAt}}',
            stdout=b'o',
            stderr=b'',
            returncode=0)

        response = self.app.get('/api/v1/version',
                                content_type='application/json')

        payload = json.loads(response.data.decode('utf-8'))
        self.assertEqual(1, payload['returncode'])
        self.assertEqual('Insights exited with result code: 1',
                         payload['stderr'])
        # NOTE(review): 'Compilation failed!' looks copied from the transform
        # tests — confirm the version endpoint really emits this on failure.
        self.assertIn(b'Compilation failed!', payload['stdout'].encode())
        self.assertEqual(200, response.status_code)

    #------------------------------------------------------------------------------

    def test_request_api_v1_version_none_sudo(self):
        """Version endpoint builds the docker command without sudo when
        USE_SUDO is disabled.

        The flag is restored in a ``finally`` block so that a failing
        request cannot leak ``USE_SUDO = False`` into other tests.
        """
        app.config['USE_SUDO'] = False
        try:
            self.Popen.set_command(
                'docker run --net=none --rm -i insights-test --',
                stdout=b'o',
                stderr=b'',
                returncode=0)
            self.Popen.set_command(
                'docker images --filter=reference=insights-test --format \'{{.ID}} {{.CreatedAt}}\'',
                stdout=b'o',
                stderr=b'',
                returncode=0)
            self.Popen.set_command(
                'docker images --filter=reference=insights-test --format {{.ID}} {{.CreatedAt}}',
                stdout=b'o',
                stderr=b'',
                returncode=0)

            rv = self.app.get('/api/v1/version',
                              content_type='application/json')
        finally:
            app.config['USE_SUDO'] = True

        data = json.loads(rv.data.decode('utf-8'))
        self.assertEqual(0, data['returncode'])
        self.assertEqual('Insights exited with result code: 0', data['stderr'])
        self.assertEqual('o\nDocker image "insights-test" info: o\n',
                         data['stdout'])
        self.assertEqual(200, rv.status_code)

    #------------------------------------------------------------------------------

    def test_request_api_v1_version_none_docker(self):
        """Version endpoint calls the plain insights binary when USE_DOCKER
        is disabled.

        The flag is restored in a ``finally`` block so that a failing
        request cannot leak ``USE_DOCKER = False`` into other tests.
        """
        app.config['USE_DOCKER'] = False
        try:
            self.Popen.set_command('insights /tmp/pyt.cpp --',
                                   stdout=b'o',
                                   stderr=b'',
                                   returncode=0)
            self.Popen.set_command(
                'docker images --filter=reference=insights-test --format \'{{.ID}} {{.CreatedAt}}\'',
                stdout=b'o',
                stderr=b'',
                returncode=0)
            self.Popen.set_command(
                'docker images --filter=reference=insights-test --format {{.ID}} {{.CreatedAt}}',
                stdout=b'o',
                stderr=b'',
                returncode=0)

            rv = self.app.get('/api/v1/version',
                              content_type='application/json')
        finally:
            app.config['USE_DOCKER'] = True

        data = json.loads(rv.data.decode('utf-8'))
        self.assertEqual(0, data['returncode'])
        self.assertEqual('Insights exited with result code: 0', data['stderr'])
        self.assertEqual(
            'o\nDocker image "insights-test" info: Docker not used\n',
            data['stdout'])
        self.assertEqual(200, rv.status_code)

    #------------------------------------------------------------------------------

    def test_request_version(self):
        """The /version page includes the raw output of the docker run."""
        self.Popen.set_command(
            'sudo -u pfes docker run --net=none --rm -i insights-test --',
            stdout=b'fake version info from docker',
            stderr=b'',
            returncode=0)
        self.Popen.set_command(
            "docker images --filter=reference=insights-test --format '{{.ID}} {{.CreatedAt}}'",
            stdout=b'o',
            stderr=b'',
            returncode=0)
        self.Popen.set_command(
            'docker images --filter=reference=insights-test --format {{.ID}} {{.CreatedAt}}',
            stdout=b'o',
            stderr=b'',
            returncode=0)

        response = self.app.get('/version')

        self.assertIn(b'fake version info from docker', response.data)
        self.assertEqual(200, response.status_code)

    #------------------------------------------------------------------------------

    def selectedStandard(self, cppStd, text):
        """Return the HTML <option> markup (as bytes) for the standard that
        is currently selected in the page's dropdown."""
        markup = ('<option value="%s" class="single"  selected="selected" >\n'
                  '              %s</option>' % (cppStd, text))
        return markup.encode()

    #------------------------------------------------------------------------------

    def test_link_rev_1_valid(self):
        """A rev=1.0 link with a valid std renders code and meta tags."""
        response = self.app.post(
            '/lnk?code=I2luY2x1ZGUgPGNzdGRpbz4KdGVtcGxhdGU8dHlwZW5hbWUgVT4KY2xhc3MgWAp7CnB1YmxpYzoKICAgIFgoKSAgICAgICAgICAgPSBkZWZhdWx0OwogICAgWChjb25zdCBYJiB4KSA9IGRlZmF1bHQ7CgogICAgdGVtcGxhdGU8dHlwZW5hbWUgVD4KICAgIFgoVCYmIHgpCiAgICA6IG1Ye30KICAgIHsgfQoKcHJpdmF0ZToKICAgIFUgbVg7Cn07CgppbnQgbWFpbigpCnsKICAgIFg8aW50PiBhcnJbMl17fTsKCiAgICBmb3IoY29uc3QgWDxjb25zdCBpbnQ+JiB4IDogYXJyKSB7IH0KfQ==&std=cpp11&rev=1.0',
            follow_redirects=True)
        self.assertIn(self.selectedStandard('cpp11', 'C++ 11'), response.data)
        self.assertIn(b'<meta property="og:title" content="C++ Insights" />',
                      response.data)
        self.assertIn(b'<meta property="og:description" content="#include &lt;cstdio&gt;\ntemplate&lt;typename U&gt;\nclass X\n{\npublic:\n    X()           = default;\n    X(const X&amp;" />',
                      response.data)
        self.assertIn(b'<title>C++ Insights</title>', response.data)

    #------------------------------------------------------------------------------

    def test_link_rev_1_invalid_std(self):
        """An unknown std in a link falls back to the cpp17 default."""
        response = self.app.post(
            '/lnk?code=I2luY2x1ZGUgPGNzdGRpbz4KdGVtcGxhdGU8dHlwZW5hbWUgVT4KY2xhc3MgWAp7CnB1YmxpYzoKICAgIFgoKSAgICAgICAgICAgPSBkZWZhdWx0OwogICAgWChjb25zdCBYJiB4KSA9IGRlZmF1bHQ7CgogICAgdGVtcGxhdGU8dHlwZW5hbWUgVD4KICAgIFgoVCYmIHgpCiAgICA6IG1Ye30KICAgIHsgfQoKcHJpdmF0ZToKICAgIFUgbVg7Cn07CgppbnQgbWFpbigpCnsKICAgIFg8aW50PiBhcnJbMl17fTsKCiAgICBmb3IoY29uc3QgWDxjb25zdCBpbnQ+JiB4IDogYXJyKSB7IH0KfQ==&std=cpp12&rev=1.0',
            follow_redirects=True)
        self.assertIn(self.selectedStandard('cpp17', 'C++ 17'), response.data)
        self.assertEqual(200, response.status_code)

    #------------------------------------------------------------------------------

    def test_link_rev_1_missing_std(self):
        """A link without a std parameter uses the cpp17 default."""
        response = self.app.post(
            '/lnk?code=I2luY2x1ZGUgPGNzdGRpbz4KdGVtcGxhdGU8dHlwZW5hbWUgVT4KY2xhc3MgWAp7CnB1YmxpYzoKICAgIFgoKSAgICAgICAgICAgPSBkZWZhdWx0OwogICAgWChjb25zdCBYJiB4KSA9IGRlZmF1bHQ7CgogICAgdGVtcGxhdGU8dHlwZW5hbWUgVD4KICAgIFgoVCYmIHgpCiAgICA6IG1Ye30KICAgIHsgfQoKcHJpdmF0ZToKICAgIFUgbVg7Cn07CgppbnQgbWFpbigpCnsKICAgIFg8aW50PiBhcnJbMl17fTsKCiAgICBmb3IoY29uc3QgWDxjb25zdCBpbnQ+JiB4IDogYXJyKSB7IH0KfQ==&rev=1.0',
            follow_redirects=True)
        self.assertIn(self.selectedStandard('cpp17', 'C++ 17'), response.data)
        self.assertEqual(200, response.status_code)

    #------------------------------------------------------------------------------

    def test_link_rev_1_missing_rev(self):
        """A link without a revision parameter is rejected with 404."""
        response = self.app.post(
            '/lnk?code=I2luY2x1ZGUgPGNzdGRpbz4KdGVtcGxhdGU8dHlwZW5hbWUgVT4KY2xhc3MgWAp7CnB1YmxpYzoKICAgIFgoKSAgICAgICAgICAgPSBkZWZhdWx0OwogICAgWChjb25zdCBYJiB4KSA9IGRlZmF1bHQ7CgogICAgdGVtcGxhdGU8dHlwZW5hbWUgVD4KICAgIFgoVCYmIHgpCiAgICA6IG1Ye30KICAgIHsgfQoKcHJpdmF0ZToKICAgIFUgbVg7Cn07CgppbnQgbWFpbigpCnsKICAgIFg8aW50PiBhcnJbMl17fTsKCiAgICBmb3IoY29uc3QgWDxjb25zdCBpbnQ+JiB4IDogYXJyKSB7IH0KfQ==&std=cpp11',
            follow_redirects=True)
        self.assertIn(self.selectedStandard('cpp17', 'C++ 17'), response.data)
        self.assertIn(b'The revision of the link is invalid.', response.data)
        self.assertEqual(404, response.status_code)

    #------------------------------------------------------------------------------

    def test_link_rev_1_invalid_rev(self):
        """A link with an unknown revision number is rejected with 404."""
        response = self.app.post(
            '/lnk?code=I2luY2x1ZGUgPGNzdGRpbz4KdGVtcGxhdGU8dHlwZW5hbWUgVT4KY2xhc3MgWAp7CnB1YmxpYzoKICAgIFgoKSAgICAgICAgICAgPSBkZWZhdWx0OwogICAgWChjb25zdCBYJiB4KSA9IGRlZmF1bHQ7CgogICAgdGVtcGxhdGU8dHlwZW5hbWUgVD4KICAgIFgoVCYmIHgpCiAgICA6IG1Ye30KICAgIHsgfQoKcHJpdmF0ZToKICAgIFUgbVg7Cn07CgppbnQgbWFpbigpCnsKICAgIFg8aW50PiBhcnJbMl17fTsKCiAgICBmb3IoY29uc3QgWDxjb25zdCBpbnQ+JiB4IDogYXJyKSB7IH0KfQ==&std=cpp11&rev=22',
            follow_redirects=True)
        self.assertIn(self.selectedStandard('cpp17', 'C++ 17'), response.data)
        self.assertIn(b'The revision of the link is invalid.', response.data)
        self.assertEqual(404, response.status_code)

    #------------------------------------------------------------------------------

    def test_link_rev_1_invalid_base64(self):
        """Malformed base64 in the code parameter still renders the page."""
        response = self.app.post(
            '/lnk?code=I2luY2x1ZGUgPGNzdGRpbz4KdGVtcGxhdGU8dHlwZW5hbWUgVT4KY2xhc3MgWAp7CnB1YmxpYzoKICAgIFgoKSAgICAgICAgICAgPSBkZWZhdWx0OwogICAgWChjb25zdCBYJiB4KSA9IGRlZmF1bHQ7CgogICAgdGVtcGxhdGU8dHlwZW5hbWUgVD4KICAgIFgoVCYmIHgpCiAgICA6IG1Ye30KICAgIHsgfQoKcHJpdmF0ZToKICAgIFUgbVg7Cn07CgppbnQgbWFpbigpCnsKICAgIFg8aW50PiBhcnJbMl17fTsKCiAgICBmb3IoY29uc3QgWDxjb25zdCBpbnQAAAAAAAAAAJiB4IDogYXJyKSB7IH0KfQ==&std=cpp11&rev=1.0',
            follow_redirects=True)
        self.assertIn(self.selectedStandard('cpp11', 'C++ 11'), response.data)
        self.assertEqual(200, response.status_code)

    #------------------------------------------------------------------------------

    def getShortLink(self,
                     code,
                     std='cpp98',
                     rev='1.0',
                     opts=None,
                     description=''):
        """POST to /api/v1/getshortlink and return the raw response.

        ``code`` and ``description`` are base64-encoded here, so callers
        pass plain text.  ``opts`` defaults to ``['alt-syntax-for']``; a
        ``None`` sentinel replaces the previous mutable default list so
        the default cannot be shared (and accidentally mutated) across
        calls.
        """
        if opts is None:
            opts = ['alt-syntax-for']
        return self.app.post(
            '/api/v1/getshortlink',
            data=json.dumps(
                dict(options=opts,
                     code=createBase64EncodedString(code),
                     desc=createBase64EncodedString(description),
                     rev=rev,
                     std=std)),
            content_type='application/json')

    #------------------------------------------------------------------------------

    def test_create_short_link(self):
        """Requesting a short link for new code succeeds."""
        response = self.getShortLink('hello')

        payload = json.loads(response.data.decode('utf-8'))
        self.assertEqual(0, payload['returncode'])

        link = payload['shortlink']
        self.assertIsNotNone(link)
        self.assertTrue(link.startswith('/s/'))
        self.assertEqual(200, response.status_code)

    #------------------------------------------------------------------------------

    def test_short_link_size(self):
        """The identifier part of a short link is eight characters long."""
        response = self.getShortLink('slzhello')

        payload = json.loads(response.data.decode('utf-8'))
        self.assertEqual(0, payload['returncode'])

        link = payload['shortlink']
        self.assertIsNotNone(link)
        self.assertTrue(link.startswith('/s/'))
        identifier = link.replace('/s/', '')
        self.assertEqual(8, len(identifier))
        self.assertEqual(200, response.status_code)

    #------------------------------------------------------------------------------

    def test_create_short_link_twice(self):
        """The second request for identical parameters returns the same
        link, flagged with returncode 1 (already existed)."""
        first = self.getShortLink('hellosame')

        payload = json.loads(first.data.decode('utf-8'))
        self.assertEqual(0, payload['returncode'])

        firstLink = payload['shortlink']
        self.assertIsNotNone(firstLink)
        self.assertTrue(firstLink.startswith('/s/'))
        self.assertEqual(200, first.status_code)

        # Identical parameters a second time.
        second = self.getShortLink('hellosame')

        payload = json.loads(second.data.decode('utf-8'))
        self.assertEqual(1, payload['returncode'])

        secondLink = payload['shortlink']
        self.assertIsNotNone(secondLink)
        self.assertTrue(secondLink.startswith('/s/'))
        self.assertEqual(200, second.status_code)

        self.assertEqual(firstLink, secondLink)

    #------------------------------------------------------------------------------

    def test_create_short_link_none_base64(self):
        """Code that is not base64-encoded is rejected with 'No source'."""
        response = self.app.post(
            '/api/v1/getshortlink',
            data=json.dumps({
                'options': ['alt-syntax-for'],
                'code': 'not-base64-encoded',
                'desc': '',
                'rev': '1.0',
                'std': 'cpp98'
            }),
            content_type='application/json')

        payload = json.loads(response.data.decode('utf-8'))
        self.assertEqual(2, payload['returncode'])

        link = payload['shortlink']
        self.assertIsNotNone(link)
        self.assertEqual('No source', link)
        self.assertEqual(200, response.status_code)

    #------------------------------------------------------------------------------

    def test_create_short_link_null(self):
        """A null code field is rejected with 'No source'."""
        response = self.app.post(
            '/api/v1/getshortlink',
            data=json.dumps({
                'options': ['alt-syntax-for'],
                'code': None,
                'rev': '1.0',
                'desc': '',
                'std': 'cpp98'
            }),
            content_type='application/json')

        payload = json.loads(response.data.decode('utf-8'))
        self.assertEqual(2, payload['returncode'])

        link = payload['shortlink']
        self.assertIsNotNone(link)
        self.assertEqual('No source', link)
        self.assertEqual(200, response.status_code)

    #------------------------------------------------------------------------------

    def test_short_link_code_differ(self):
        """Different source code yields different short links."""
        first = self.getShortLink('hellosamed')

        payload = json.loads(first.data.decode('utf-8'))
        self.assertEqual(0, payload['returncode'])

        firstLink = payload['shortlink']
        self.assertIsNotNone(firstLink)
        self.assertTrue(firstLink.startswith('/s/'))
        self.assertEqual(200, first.status_code)

        # Same request except for one character of source code.
        second = self.getShortLink('bellosamed')

        payload = json.loads(second.data.decode('utf-8'))
        self.assertEqual(0, payload['returncode'])

        secondLink = payload['shortlink']
        self.assertIsNotNone(secondLink)
        self.assertTrue(secondLink.startswith('/s/'))
        self.assertEqual(200, second.status_code)

        self.assertNotEqual(firstLink, secondLink)

    #------------------------------------------------------------------------------

    def test_short_link_options_differ(self):
        """Different insights options yield different short links."""
        first = self.getShortLink('cellosamed')

        payload = json.loads(first.data.decode('utf-8'))
        self.assertEqual(0, payload['returncode'])

        firstLink = payload['shortlink']
        self.assertIsNotNone(firstLink)
        self.assertTrue(firstLink.startswith('/s/'))
        self.assertEqual(200, first.status_code)

        # Same source, different option set.
        second = self.getShortLink('cellosamed',
                                   opts=['alt-syntax-subscription'])

        payload = json.loads(second.data.decode('utf-8'))
        self.assertEqual(0, payload['returncode'])

        secondLink = payload['shortlink']
        self.assertIsNotNone(secondLink)
        self.assertTrue(secondLink.startswith('/s/'))
        self.assertEqual(200, second.status_code)

        self.assertNotEqual(firstLink, secondLink)

    #------------------------------------------------------------------------------

    def test_short_link_std_differ(self):
        """Different C++ standards yield different short links."""
        first = self.getShortLink('mellosamed', std='cpp11')

        payload = json.loads(first.data.decode('utf-8'))
        self.assertEqual(0, payload['returncode'])

        firstLink = payload['shortlink']
        self.assertIsNotNone(firstLink)
        self.assertTrue(firstLink.startswith('/s/'))
        self.assertEqual(200, first.status_code)

        # Same source, different standard.
        second = self.getShortLink('mellosamed', std='cpp14')

        payload = json.loads(second.data.decode('utf-8'))
        self.assertEqual(0, payload['returncode'])

        secondLink = payload['shortlink']
        self.assertIsNotNone(secondLink)
        self.assertTrue(secondLink.startswith('/s/'))
        self.assertEqual(200, second.status_code)

        self.assertNotEqual(firstLink, secondLink)

    #------------------------------------------------------------------------------

    def test_short_link_multiple_options(self):
        rv = self.getShortLink(createBase64EncodedString('multiple options'),
                               opts=['alt-syntax-subscription,alt-syntax-for'])

        data = json.loads(rv.data.decode('utf-8'))
        self.assertTrue(data['returncode'] == 0)

        shortLink = data['shortlink']

        assert None != shortLink
        assert shortLink.startswith('/s/')
        assert 200 == rv.status_code

        rv2 = self.app.get(shortLink, follow_redirects=False)
        assert 200 == rv2.status_code
        # XXX not working
#        assert shortLink != shortLink2
#------------------------------------------------------------------------------

    def test_invalid_short_link(self):
        """An unknown short-link id yields 404 and a placeholder comment."""
        response = self.app.get('/s/invalid', follow_redirects=True)

        self.assertEqual(404, response.status_code)
        self.assertIn(b'// There is no such link.', response.data)

    #------------------------------------------------------------------------------

    def test_invalid_short_link_root(self):
        """The bare /s path (no id) yields the generic 404 page."""
        response = self.app.get('/s', follow_redirects=True)

        self.assertEqual(404, response.status_code)
        self.assertIn(b'Sorry, the content your are looking for is not there.',
                      response.data)

    #------------------------------------------------------------------------------

    def test_create_max_length_short_link(self):
        """Source exactly at the 1,000,000-character limit is accepted."""
        source = 'a' * 1000000

        response = self.app.post(
            '/api/v1/getshortlink',
            data=json.dumps({
                'options': ['alt-syntax-for'],
                'code': createBase64EncodedString(source),
                'rev': '1.0',
                'desc': '',
                'std': 'cpp98'
            }),
            content_type='application/json')

        payload = json.loads(response.data.decode('utf-8'))
        self.assertEqual(0, payload['returncode'])

        link = payload['shortlink']
        self.assertIsNotNone(link)
        self.assertTrue(link.startswith('/s/'))
        self.assertEqual(200, response.status_code)

    #------------------------------------------------------------------------------

    def test_create_too_long_short_link(self):
        """Source one character over the limit is rejected."""
        source = 'a' * 1000001

        response = self.getShortLink(createBase64EncodedString(source))

        payload = json.loads(response.data.decode('utf-8'))
        self.assertEqual(1, payload['returncode'])

        link = payload['shortlink']
        self.assertIsNotNone(link)
        self.assertEqual('Source too long', link)
        self.assertEqual(200, response.status_code)

    #------------------------------------------------------------------------------

    def test_favicon(self):
        """The favicon is served successfully."""
        response = self.app.get('/favicon.ico')

        self.assertEqual(200, response.status_code)

    #------------------------------------------------------------------------------

    def test_twitter_description_for_root(self):
        """The landing page carries the default og:description meta tag."""
        response = self.app.get('/')

        self.assertIn(b'<meta property="og:description" content="C++ Insights - See your source code with the eyes of a compiler." />',
                      response.data)
        self.assertEqual(200, response.status_code)

    #------------------------------------------------------------------------------

    def test_twitter_description_for_link(self):
        """A /lnk page embeds the decoded source in its og:description."""
        response = self.app.post(
            '/lnk?code=I2luY2x1ZGUgPGNzdGRpbz4KdGVtcGxhdGU8dHlwZW5hbWUgVT4KY2xhc3MgWAp7CnB1YmxpYzoKICAgIFgoKSAgICAgICAgICAgPSBkZWZhdWx0OwogICAgWChjb25zdCBYJiB4KSA9IGRlZmF1bHQ7CgogICAgdGVtcGxhdGU8dHlwZW5hbWUgVD4KICAgIFgoVCYmIHgpCiAgICA6IG1Ye30KICAgIHsgfQoKcHJpdmF0ZToKICAgIFUgbVg7Cn07CgppbnQgbWFpbigpCnsKICAgIFg8aW50PiBhcnJbMl17fTsKCiAgICBmb3IoY29uc3QgWDxjb25zdCBpbnQ+JiB4IDogYXJyKSB7IH0KfQ==&std=cpp11&rev=1.0',
            follow_redirects=True)
        self.assertIn(b'<meta property="og:description" content="#include &lt;cstdio&gt;\ntemplate&lt;typename U&gt;\nclass X\n{\npublic:\n    X()           = default;\n    X(const X&amp;" />',
                      response.data)
        self.assertEqual(200, response.status_code)

    #------------------------------------------------------------------------------

    def test_twitter_description_for_short_link(self):
        """A short-link page embeds the escaped source in og:description."""
        response = self.getShortLink(
            '#include <cstdio> int main() { printf("hello\n"); }')

        payload = json.loads(response.data.decode('utf-8'))
        self.assertEqual(0, payload['returncode'])

        link = payload['shortlink']
        self.assertIsNotNone(link)
        self.assertEqual(200, response.status_code)

        page = self.app.get(link, follow_redirects=False)

        self.assertEqual(200, page.status_code)
        self.assertIn(b'<meta property="og:description" content="#include &lt;cstdio&gt; int main() { printf(&#34;hello\n&#34;); }" />',
                      page.data)

    #------------------------------------------------------------------------------

    def test_create_short_link_with_description(self):
        """A short link's description ends up in the page title and metas."""
        response = self.getShortLink('hello with description',
                                     description='A description')

        payload = json.loads(response.data.decode('utf-8'))
        self.assertEqual(0, payload['returncode'])

        link = payload['shortlink']

        page = self.app.get(link, follow_redirects=False)

        self.assertIn(b'<meta property="og:title" content="C++ Insights - A description" />',
                      page.data)
        self.assertIn(b'<meta name="description" content="C++ Insights - A description" />',
                      page.data)
        self.assertIn(b'<title>C++ Insights - A description</title>', page.data)
        self.assertEqual(200, page.status_code)

    #------------------------------------------------------------------------------

    def test_root_description(self):
        """The landing page carries the default title and description."""
        response = self.app.get('/', follow_redirects=False)

        self.assertIn(b'<meta property="og:title" content="C++ Insights" />',
                      response.data)
        self.assertIn(b'<meta property="og:description" content="C++ Insights - See your source code with the eyes of a compiler." />',
                      response.data)
        self.assertIn(b'<title>C++ Insights</title>', response.data)
        self.assertEqual(200, response.status_code)

    #------------------------------------------------------------------------------

    def test_short_link_toolid_0(self):
        """A freshly created short link is stored with the expected toolid.

        The sqlite connection is closed in a ``finally`` block so a failing
        query cannot leave the test database locked for later tests.
        """
        rv = self.getShortLink('test for tool id')

        data = json.loads(rv.data.decode('utf-8'))
        self.assertEqual(0, data['returncode'])

        shortLink = data['shortlink']

        self.assertIsNotNone(shortLink)
        self.assertTrue(shortLink.startswith('/s/'))
        self.assertEqual(200, rv.status_code)

        conn = sqlite3.connect(self.getDbNameMock())
        try:
            cur = conn.execute('SELECT toolid FROM shortened WHERE short = ?',
                               (shortLink.replace('/s/', ''), ))
            row = cur.fetchone()
        finally:
            conn.close()

        # NOTE(review): the test name says "toolid_0" but the stored value
        # is expected to be 1 — confirm which is intended.
        assert 1 == row[0]

    #------------------------------------------------------------------------------

    def test_community_event_no_file(self):
        """Without an event file there is neither link nor title."""
        import app
        link, title = app.getCommunityEvent()

        self.assertIsNone(link)
        self.assertIsNone(title)

    #------------------------------------------------------------------------------

    def test_community_event_empty_file(self):
        """An empty community-event file yields no link and no title."""
        # Create the event file with no content.
        with open(self.getCommunityEventFileNameMock(), 'w'):
            pass

        import app
        link, title = app.getCommunityEvent()
        # NOTE(review): passing `self` as an explicit argument to a bound
        # method looks odd — confirm removeCommunityEventTestFile's
        # signature.
        self.removeCommunityEventTestFile(self)

        self.assertIsNone(link)
        self.assertIsNone(title)

    #------------------------------------------------------------------------------

    def test_community_event_valid_file(self):
        """A well-formed 'link;title' event file is split into its parts."""
        expectedLink = 'https://event.com'
        expectedTitle = 'Some Event'

        with open(self.getCommunityEventFileNameMock(), 'w') as f:
            f.write('%s;%s' % (expectedLink, expectedTitle))

        import app
        link, title = app.getCommunityEvent()
        self.removeCommunityEventTestFile(self)

        self.assertEqual(expectedLink, link)
        self.assertEqual(expectedTitle, title)

    #------------------------------------------------------------------------------

    def test_get_app(self):
        """app.getApp() returns a usable application object."""
        import app

        self.assertIsNotNone(app.getApp())
# Example #42
class TestSlurmDockerRunner(unittest.TestCase):
    """Tests for the docker commands produced by popper's SLURM runner.

    ``Popen`` inside ``popper.runner_host`` is replaced with testfixtures'
    ``MockPopen``, so no docker/sbatch binaries are actually executed.
    """

    def setUp(self):
        # Silence popper's logger during the test; restored in tearDown.
        log.setLevel('CRITICAL')
        self.Popen = MockPopen()
        replacer = Replacer()
        # Every subprocess spawned via popper.runner_host now hits the mock.
        replacer.replace('popper.runner_host.Popen', self.Popen)
        self.addCleanup(replacer.restore)

    def tearDown(self):
        # Restore the default log level changed in setUp.
        log.setLevel('NOTSET')

    def test_create_cmd(self):
        """_create_cmd builds the full `docker create` command line.

        Covers both the default engine configuration and one carrying
        extra engine options (volumes, environment, privileged flag,
        hostname and domainname).
        """
        config = {'workspace_dir': '/w'}
        with DockerRunner(config=PopperConfig(**config)) as drunner:
            step = {'args': ['-two', '-flags']}
            cmd = drunner._create_cmd(step, 'foo:1.9', 'container_name')

            # Default config: only the workspace mounts are added.
            expected = ('docker create'
                        ' --name container_name'
                        ' --workdir /workspace'
                        ' -v /w:/workspace'
                        ' -v /var/run/docker.sock:/var/run/docker.sock'
                        ' foo:1.9 -two -flags')

            self.assertEqual(expected, cmd)

        # Engine options that must be translated into docker CLI flags.
        config_dict = {
            'engine': {
                'name': 'docker',
                'options': {
                    'privileged': True,
                    'hostname': 'popper.local',
                    'domainname': 'www.example.org',
                    'volumes': ['/path/in/host:/path/in/container'],
                    'environment': {
                        'FOO': 'bar'
                    }
                }
            },
            'resource_manager': {
                'name': 'slurm'
            }
        }

        config = {'workspace_dir': '/w', 'config_file': config_dict}
        with DockerRunner(config=PopperConfig(**config)) as drunner:
            step = {'args': ['-two', '-flags']}
            cmd = drunner._create_cmd(step, 'foo:1.9', 'container_name')

            # Extra -v / -e / --privileged / --hostname / --domainname flags
            # appear between the default mounts and the image name.
            expected = ('docker create --name container_name '
                        '--workdir /workspace '
                        '-v /w:/workspace '
                        '-v /var/run/docker.sock:/var/run/docker.sock '
                        '-v /path/in/host:/path/in/container '
                        '-e FOO=bar --privileged --hostname popper.local '
                        '--domainname www.example.org '
                        'foo:1.9 -two -flags')

            self.assertEqual(expected, cmd)

    @replace('popper.runner_slurm.os.kill', mock_kill)
    def test_run(self, mock_kill):
        """Running a workflow submits an sbatch job and tails its output,
        and the generated job script contains the expected docker commands.
        """
        # Expected sbatch submission for workflow id 123abc, step 1.
        self.Popen.set_command(
            'sbatch --wait --job-name popper_1_123abc '
            '--output /tmp/popper/slurm/popper_1_123abc.out '
            '/tmp/popper/slurm/popper_1_123abc.sh',
            returncode=0)

        # The runner also tails the job's output file.
        self.Popen.set_command('tail -f /tmp/popper/slurm/popper_1_123abc.out',
                               returncode=0)

        config_dict = {
            'engine': {
                'name': 'docker',
                'options': {
                    'privileged': True,
                    'hostname': 'popper.local',
                    'domainname': 'www.example.org',
                    'volumes': ['/path/in/host:/path/in/container'],
                    'environment': {
                        'FOO': 'bar'
                    }
                }
            },
            'resource_manager': {
                'name': 'slurm'
            }
        }

        config = PopperConfig(workspace_dir='/w', config_file=config_dict)
        # Fixed workflow id so the generated file names are predictable.
        config.wid = "123abc"

        with WorkflowRunner(config) as r:
            wf = YMLWorkflow("""
            version: '1'
            steps:
            - uses: 'popperized/bin/sh@master'
              runs: [cat]
              args: README.md
            """)
            wf.parse()
            r.run(wf)

        # The job script written by the runner must drive docker directly.
        with open('/tmp/popper/slurm/popper_1_123abc.sh', 'r') as f:
            content = f.read()
            self.assertEqual(
                content, f"""#!/bin/bash
docker rm -f popper_1_123abc || true
docker build -t popperized/bin:master {os.environ['HOME']}/.cache/popper/123abc/github.com/popperized/bin/sh
docker create --name popper_1_123abc --workdir /workspace --entrypoint cat -v /w:/workspace -v /var/run/docker.sock:/var/run/docker.sock -v /path/in/host:/path/in/container -e FOO=bar --privileged --hostname popper.local --domainname www.example.org popperized/bin:master README.md
docker start --attach popper_1_123abc""")
# Example #43
 def test_invalid_attribute(self):
     """Accessing an attribute MockPopen does not fake raises AttributeError."""
     mock = MockPopen()
     mock.set_command('command')
     proc = mock('command')
     with ShouldRaise(AttributeError("Mock object has no attribute 'foo'")):
         proc.foo
# Example #44
class TestSlurmSlurmRunner(PopperTest):
    """Tests for the SLURM resource-manager runner.

    subprocess.Popen (as used via popper.runner_host) is replaced with a
    testfixtures MockPopen, so sbatch/srun/scancel/tail never actually run.
    Each test scripts the expected command lines and their return codes.
    """

    def setUp(self):
        # Quiet popper's logger and install the Popen mock; the Replacer
        # patch is undone automatically after each test via addCleanup.
        log.setLevel("CRITICAL")
        self.Popen = MockPopen()
        replacer = Replacer()
        replacer.replace("popper.runner_host.Popen", self.Popen)
        self.addCleanup(replacer.restore)

    def tearDown(self):
        # Restore the default log level changed in setUp.
        log.setLevel("NOTSET")

    def test_tail_output(self):
        """_tail_output spawns `tail -f` on the file and records its PID."""
        self.Popen.set_command("tail -f slurm-x.out", returncode=0)
        with SlurmRunner(config=ConfigLoader.load()) as sr:
            self.assertEqual(sr._tail_output("slurm-x.out"), 0)
            self.assertEqual(len(sr._out_stream_pid), 1)

    def test_stop_running_tasks(self):
        """stop_running_tasks cancels each spawned job via `scancel --name`."""
        self.Popen.set_command("scancel --name job_a", returncode=0)
        with SlurmRunner(config=ConfigLoader.load()) as sr:
            sr._spawned_jobs.add("job_a")
            sr.stop_running_tasks()
            # stderr=-2 is subprocess.STDOUT, stdout=-1 is subprocess.PIPE.
            compare(
                call.Popen(
                    ["scancel", "--name", "job_a"],
                    cwd=os.getcwd(),
                    env=None,
                    preexec_fn=os.setsid,
                    stderr=-2,
                    stdout=-1,
                    universal_newlines=True,
                ),
                self.Popen.all_calls[0],
            )

    @replace("popper.runner_slurm.os.kill", mock_kill)
    def test_exec_srun(self, mock_kill):
        """_exec_srun builds the srun command line from the per-step
        resource-manager options (here gpus-per-task and overcommit).
        """
        config_dict = {
            "engine": {
                "name": "singularity",
                "options": {},
            },
            "resource_manager": {
                "name": "slurm",
                "options": {
                    "sample": {
                        "gpus-per-task": 2,
                        "overcommit": True
                    }
                },
            },
        }

        config = ConfigLoader.load(workspace_dir="/w", config_file=config_dict)
        self.Popen.set_command(
            "srun --nodes 1 --ntasks 1 --ntasks-per-node 1 --gpus-per-task 2 --overcommit ls -la",
            returncode=0,
        )
        # default_box=True lets step attribute lookups succeed without data
        step = Box({"id": "sample"}, default_box=True)
        with SlurmRunner(config=config) as sr:
            e = sr._exec_srun(["ls", "-la"], step, logging=True)
            self.assertEqual(e, 0)

        call_srun = call.Popen(
            [
                "srun",
                "--nodes",
                "1",
                "--ntasks",
                "1",
                "--ntasks-per-node",
                "1",
                "--gpus-per-task",
                "2",
                "--overcommit",
                "ls",
                "-la",
            ],
            cwd=os.getcwd(),
            env=None,
            preexec_fn=os.setsid,
            stderr=-2,
            stdout=-1,
            universal_newlines=True,
        )

        self.assertEqual(call_srun in self.Popen.all_calls, True)

    @replace("popper.runner_slurm.os.kill", mock_kill)
    def test_exec_mpi(self, mock_kill):
        """_exec_mpi writes an sbatch batch script wrapping `mpirun`,
        submits it with `sbatch --wait`, and tails its output file.
        """
        config_dict = {
            "engine": {
                "name": "singularity",
                "options": {},
            },
            "resource_manager": {
                "name": "slurm",
                "options": {
                    "sample": {
                        "gpus-per-task": 2,
                        "overcommit": True
                    }
                },
            },
        }

        config = ConfigLoader.load(workspace_dir="/w", config_file=config_dict)
        self.Popen.set_command(
            "sbatch "
            "--wait --gpus-per-task 2 --overcommit "
            f"popper_sample_{config.wid}.sh",
            returncode=0,
        )
        self.Popen.set_command(f"tail -f popper_sample_{config.wid}.out",
                               returncode=0)
        step = Box({"id": "sample"}, default_box=True)
        with SlurmRunner(config=config) as sr:
            e = sr._exec_mpi(["ls -la"], step)
            self.assertEqual(e, 0)
            # the generated batch script must match exactly, header included
            with open(f"popper_sample_{config.wid}.sh", "r") as f:
                content = f.read()

            self.assertEqual(
                content,
                f"""#!/bin/bash
#SBATCH --job-name=popper_sample_{config.wid}
#SBATCH --output=popper_sample_{config.wid}.out
#SBATCH --nodes=1
#SBATCH --ntasks=1
#SBATCH --ntasks-per-node=1
mpirun ls -la""",
            )
            # job bookkeeping is cleaned up and the tail thread has exited
            self.assertEqual(len(sr._spawned_jobs), 0)
            self.assertEqual(sr._out_stream_thread.is_alive(), False)

        call_tail = call.Popen(
            ["tail", "-f", f"popper_sample_{config.wid}.out"],
            cwd=os.getcwd(),
            env=None,
            preexec_fn=os.setsid,
            stderr=-2,
            stdout=-1,
            universal_newlines=True,
        )

        call_sbatch = call.Popen(
            [
                "sbatch",
                "--wait",
                "--gpus-per-task",
                "2",
                "--overcommit",
                f"popper_sample_{config.wid}.sh",
            ],
            cwd=os.getcwd(),
            env=None,
            preexec_fn=os.setsid,
            stderr=-2,
            stdout=-1,
            universal_newlines=True,
        )

        self.assertEqual(call_tail in self.Popen.all_calls, True)
        self.assertEqual(call_sbatch in self.Popen.all_calls, True)

    def test_dry_run(self):
        """With dry_run=True no subprocess is ever spawned."""
        config = ConfigLoader.load(engine_name="singularity",
                                   resman_name="slurm",
                                   dry_run=True)

        with WorkflowRunner(config) as r:
            wf_data = {
                "steps": [{
                    "uses": "docker://alpine",
                    "runs": ["cat"],
                    "args": ["README.md"],
                }]
            }
            r.run(WorkflowParser.parse(wf_data=wf_data))

        self.assertEqual(self.Popen.all_calls, [])

    # NOTE(review): disabled test kept verbatim below; re-enable or delete
    # once the srun failure path is stable.
    # @replace("popper.runner_slurm.os.kill", mock_kill)
    # def test_exec_srun_failure(self, mock_kill):
    #     config_dict = {
    #         "engine": {
    #             "name": "singularity",
    #             "options": {
    #                 "privileged": True,
    #                 "hostname": "popper.local",
    #                 "domainname": "www.example.org",
    #                 "volumes": ["/path/in/host:/path/in/container"],
    #                 "environment": {"FOO": "bar"},
    #             },
    #         },
    #         "resource_manager": {
    #             "name": "slurm",
    #             "options": {"1": {"nodes": 2, "nodelist": "worker1,worker2"}},
    #         },
    #     }

    #     config = ConfigLoader.load(workspace_dir="/w", config_file=config_dict)

    #     self.Popen.set_command(
    #         f"srun --nodes 2 --ntasks 2 --ntasks-per-node 1 --nodelist worker1,worker2 podman rm -f popper_1_{config.wid}",
    #         returncode=0,
    #     )

    #     self.Popen.set_command(
    #         f"srun --nodes 2 --ntasks 2 --ntasks-per-node 1 --nodelist worker1,worker2 podman pull alpine:latest",
    #         returncode=0,
    #     )

    #     self.Popen.set_command(
    #         f"srun --nodes 2 --ntasks 2 --ntasks-per-node 1 --nodelist worker1,worker2 podman create --name popper_1_{config.wid} --workdir /workspace -v /w:/workspace:Z -v /path/in/host:/path/in/container -e FOO=bar --privileged --hostname popper.local --domainname www.example.org alpine:latest ls",
    #         returncode=0,
    #     )

    #     self.Popen.set_command(
    #         f"srun --nodes 2 --ntasks 2 --ntasks-per-node 1 --nodelist worker1,worker2 podman start --attach popper_1_{config.wid}",
    #         returncode=12,
    #     )

    #     with WorkflowRunner(config) as r:
    #         wf_data = {"steps": [{"uses": "docker://alpine", "args": ["ls"]}]}
    #         self.assertRaises(SystemExit, r.run, WorkflowParser.parse(wf_data=wf_data))

    @replace("popper.runner_slurm.os.kill", mock_kill)
    def test_exec_mpi_failure(self, mock_kill):
        """A failing sbatch submission (returncode 12) aborts the run."""
        config_dict = {
            "engine": {
                "name": "singularity",
                "options": {},
            },
            "resource_manager": {
                "name": "slurm",
                "options": {
                    "1": {
                        "nodes": 2,
                        "nodelist": "worker1,worker2",
                        "overcommit": True
                    }
                },
            },
        }

        config = ConfigLoader.load(workspace_dir="/w", config_file=config_dict)

        self.Popen.set_command(
            "sbatch "
            "--wait --overcommit "
            f"popper_1_{config.wid}.sh",
            returncode=12,
        )

        self.Popen.set_command(f"tail -f popper_1_{config.wid}.out",
                               returncode=0)

        with WorkflowRunner(config) as r:
            wf_data = {"steps": [{"uses": "docker://alpine", "args": ["ls"]}]}
            self.assertRaises(SystemExit, r.run,
                              WorkflowParser.parse(wf_data=wf_data))
Example #45
0
 def setUp(self):
     """Silence popper logging and route runner_host's Popen through a mock."""
     log.setLevel('CRITICAL')
     self.Popen = MockPopen()
     r = Replacer()
     r.replace('popper.runner_host.Popen', self.Popen)
     self.addCleanup(r.restore)
Example #46
0
 def setUp(self):
     """Swap the module-under-test's subprocess.Popen for a MockPopen."""
     self.popen = MockPopen()
     r = Replacer()
     r.replace('testfixtures.tests.test_popen.subprocess.Popen', self.popen)
     self.addCleanup(r.restore)
Example #47
0
 def setUp(self):
     """Install a MockPopen in place of popper.runner_host.Popen."""
     self.Popen = MockPopen()
     r = Replacer()
     r.replace("popper.runner_host.Popen", self.Popen)
     self.addCleanup(r.restore)
Example #48
0
 def test_invalid_parameters(self):
     """Unknown keyword arguments to the mocked Popen raise TypeError."""
     mock_popen = MockPopen()
     expected = TypeError("Popen() got an unexpected keyword argument 'foo'")
     with ShouldRaise(expected):
         mock_popen(foo='bar')
Example #49
0
class TestMyFunc(TestCase):
    """Examples of driving code under test with testfixtures' MockPopen:
    scripted stdout/stderr, return codes, polling and signal delivery.
    """

    def setUp(self):
        # Replace subprocess.Popen (located by `dotted_path`) with a
        # MockPopen for the duration of each test.
        self.Popen = MockPopen()
        self.r = Replacer()
        self.r.replace(dotted_path, self.Popen)
        self.addCleanup(self.r.restore)

    def test_example(self):
        """my_func() returns the stdout of `svn ls -R foo`."""
        # set up
        self.Popen.set_command('svn ls -R foo', stdout=b'o', stderr=b'e')

        # testing of results
        compare(my_func(), b'o')

        # testing calls were in the right order and with the correct parameters:
        # (fix: inspect the mock patched in setUp — self.Popen — instead of
        # the unrelated name `Popen`)
        compare([
            call.Popen('svn ls -R foo',
                       shell=True, stderr=PIPE, stdout=PIPE),
            call.Popen_instance.communicate()
            ], self.Popen.mock.method_calls)

    def test_example_bad_returncode(self):
        """A non-zero return code makes my_func() raise RuntimeError."""
        # set up (fix: configure the fixture created in setUp via self.Popen)
        self.Popen.set_command('svn ls -R foo', stdout=b'o', stderr=b'e',
                               returncode=1)

        # testing of error
        with ShouldRaise(RuntimeError('something bad happened')):
            my_func()

    def test_communicate_with_input(self):
        """communicate(input) is recorded along with its argument."""
        # setup
        Popen = MockPopen()
        Popen.set_command('a command')
        # usage
        process = Popen('a command', stdout=PIPE, stderr=PIPE, shell=True)
        out, err = process.communicate('foo')
        # test call list (stderr/stdout of -1 == subprocess.PIPE)
        compare([
                call.Popen('a command', shell=True, stderr=-1, stdout=-1),
                call.Popen_instance.communicate('foo'),
                ], Popen.mock.method_calls)

    def test_read_from_stdout_and_stderr(self):
        """Scripted stdout/stderr are readable as file-like objects."""
        # setup
        Popen = MockPopen()
        Popen.set_command('a command', stdout=b'foo', stderr=b'bar')
        # usage
        process = Popen('a command', stdout=PIPE, stderr=PIPE, shell=True)
        compare(process.stdout.read(), b'foo')
        compare(process.stderr.read(), b'bar')
        # test call list
        compare([
                call.Popen('a command', shell=True, stderr=PIPE, stdout=PIPE),
                ], Popen.mock.method_calls)

    def test_wait_and_return_code(self):
        """returncode stays None until wait() returns the scripted code."""
        # setup
        Popen = MockPopen()
        Popen.set_command('a command', returncode=3)
        # usage
        process = Popen('a command')
        compare(process.returncode, None)
        # result checking
        compare(process.wait(), 3)
        compare(process.returncode, 3)
        # test call list
        compare([
                call.Popen('a command'),
                call.Popen_instance.wait(),
                ], Popen.mock.method_calls)

    def test_send_signal(self):
        """send_signal() is recorded with the signal number."""
        # setup
        Popen = MockPopen()
        Popen.set_command('a command')
        # usage
        process = Popen('a command', stdout=PIPE, stderr=PIPE, shell=True)
        process.send_signal(0)
        # result checking
        compare([
                call.Popen('a command', shell=True, stderr=-1, stdout=-1),
                call.Popen_instance.send_signal(0),
                ], Popen.mock.method_calls)

    def test_poll_until_result(self):
        """poll() returns None `poll_count` times before the returncode."""
        # setup
        Popen = MockPopen()
        Popen.set_command('a command', returncode=3, poll_count=2)
        # example usage
        process = Popen('a command')
        while process.poll() is None:
            # you'd probably have a sleep here, or go off and
            # do some other work.
            pass
        # result checking
        compare(process.returncode, 3)
        compare([
                call.Popen('a command'),
                call.Popen_instance.poll(),
                call.Popen_instance.poll(),
                call.Popen_instance.poll(),
                ], Popen.mock.method_calls)
Example #50
0
class GitRepoTestCase():
    def setup_method(self, method):
        """Per-test setup: create a throwaway git repository, mock the git
        subprocess layer, instantiate the service under test, and wire up
        betamax HTTP recording plus verbose git/HTTP logging.
        """
        self.log.info('GitRepoTestCase.setup_method({})'.format(method))
        # build temporary directory
        self.tempdir = TemporaryDirectory()
        # repository mockup (in a temporary place)
        self.repository = Repo.init(self.tempdir.name)
        # setup git command mockup
        self.Popen = MockPopen()
        self.Popen.mock.Popen_instance.stdin = None
        self.Popen.mock.Popen_instance.wait = lambda *a, **k: self.Popen.wait()
        # make the mocked process usable as a context manager
        self.Popen.mock.Popen_instance.__enter__ = lambda self: self
        self.Popen.mock.Popen_instance.__exit__ = lambda self, *a, **k: None
        # when initiating service with no repository, the connection is not triggered
        self.service = self.get_service()
        self.service.repository = self.repository
        # setup http api mockup
        self.recorder = betamax.Betamax(self.get_requests_session())
        self.get_requests_session().headers['Accept-Encoding'] = 'identity'
        # have git commands logged
        Git.GIT_PYTHON_TRACE = True
        FORMAT = '> %(message)s'
        formatter = logging.Formatter(fmt=FORMAT)
        handler = logging.StreamHandler()
        handler.setFormatter(formatter)
        logging.getLogger('git.cmd').removeHandler(logging.NullHandler())
        logging.getLogger('git.cmd').addHandler(handler)
        logging.getLogger('git.cmd').propagate = True
        # have HTTP requests logged
        import http.client
        http.client.HTTPConnection.debuglevel = 1
        logging.getLogger('requests.packages.urllib3').setLevel(logging.DEBUG)
        logging.getLogger('requests.packages.urllib3').propagate = True

    def teardown_method(self, method):
        self.log.info('GitRepoTestCase.teardown_method({})'.format(method))
        self.tempdir.cleanup()

    '''popen helper'''

    def set_mock_popen_commands(self, cmd_list):
        for cmd, out, err, rc in cmd_list:
            self.Popen.set_command(cmd, out, err, returncode=rc)

    def mockup_git(self, namespace, repository):
        """Prepare the git layer for mocking: disable GitPython's refspec
        checking, seed FETCH_HEAD with a fake fetched ref, and return a
        Replace context manager that swaps git's Popen for the mock.
        """
        # disable refspec check
        from git import remote
        remote.Remote._assert_refspec = lambda self: None
        # write FETCH_HEAD ref
        with open(os.path.join(self.repository.git_dir, 'FETCH_HEAD'), 'w') as f:
            f.write("749656b8b3b282d11a4221bb84e48291ca23ecc6" \
                    "		branch 'master' of git@{}/{}/{}".format(self.service.fqdn, namespace, repository))
        return Replace('git.cmd.Popen', self.Popen)

    '''assertion helpers'''

    def assert_repository_exists(self, namespace, repository):
        try:
            self.service.get_repository(namespace, repository)
        except Exception as err:
            raise AssertionError("Repository {}/{} not found on {}: {}".format(namespace,
                                                                               repository,
                                                                               self.service.name,
                                                                               err)) from err

    def assert_repository_not_exists(self, namespace, repository):
        try:
            self.service.get_repository(namespace, repository)
        except Exception as err:
            return
        #raise AssertionError("Repository {}/{} exists on {}".format(namespace,
        #                                                                       repository,
        #                                                                       self.service.name,
        #                                                                ))

    def assert_added_remote(self, remote):
        try:
            self.repository.remote(remote)
        except ValueError as err:
            raise AssertionError("Remote {} not in repository".format(remote)) from err

    def assert_added_remote_defaults(self):
        self.assert_added_remote(self.service.name)
        self.assert_added_remote('all')

    def assert_tracking_remote(self, remote_name=None, branch_name='master'):
        if not remote_name:
            remote_name = self.service.name
        for branch in self.repository.branches:
            if branch == branch_name:
                assert remote_name in self.repository.branches[0].tracking_branch().name, \
                    'Could not set "{}" as tracking branch master'.format(self.service.name)

    '''test cases templates'''

    def action_fork(self, cassette_name, local_namespace, remote_namespace, repository):
        """Template test: fork remote_namespace/repository into
        local_namespace via the service, with git subprocesses mocked and
        HTTP replayed from the named betamax cassette; then emulate the
        remotes the real git commands would have created.
        """
        # hijack subprocess call
        with self.mockup_git(local_namespace, repository):
            # prepare output for git commands
            remote_slug = self.service.format_path(namespace=remote_namespace, repository=repository, rw=True)
            local_slug = self.service.format_path(namespace=local_namespace, repository=repository, rw=True)
            self.set_mock_popen_commands([
                ('git remote add upstream {}'.format(remote_slug), b'', b'', 0),
                ('git remote add all {}'.format(local_slug), b'', b'', 0),
                ('git remote add {} {}'.format(self.service.name, local_slug), b'', b'', 0),
                ('git version', b'git version 2.8.0', b'', 0),
                ('git pull --progress -v {} master'.format(self.service.name), b'', '\n'.join([
                    'POST git-upload-pack (140 bytes)',
                    'remote: Counting objects: 8318, done.',
                    'remote: Compressing objects: 100% (3/3), done.',
                    'remote: Total 8318 (delta 0), reused 0 (delta 0), pack-reused 8315',
                    'Receiving objects: 100% (8318/8318), 3.59 MiB | 974.00 KiB/s, done.',
                    'Resolving deltas: 100% (5126/5126), done.',
                    'From {}:{}/{}'.format(self.service.fqdn, local_namespace, repository),
                    ' * branch            master     -> FETCH_HEAD',
                    ' * [new branch]      master     -> {}/master'.format(self.service.name)]).encode('utf-8'),
                0)
            ])
            with self.recorder.use_cassette('_'.join(['test', self.service.name, cassette_name])):
                self.service.connect()
                self.service.fork(remote_namespace, repository)
                # emulate the outcome of the git actions
                self.service.repository.create_remote('upstream', url=remote_slug)
                self.service.repository.create_remote('all', url=local_slug)
                self.service.repository.create_remote(self.service.name, url=local_slug)

    def action_fork__no_clone(self, cassette_name, local_namespace, remote_namespace, repository):
        """Template test for the no-clone fork variant.

        NOTE(review): this body is currently identical to action_fork —
        presumably it should skip the clone/pull emulation; confirm intent.
        """
        # hijack subprocess call
        with self.mockup_git(local_namespace, repository):
            # prepare output for git commands
            remote_slug = self.service.format_path(namespace=remote_namespace, repository=repository, rw=True)
            local_slug = self.service.format_path(namespace=local_namespace, repository=repository, rw=True)
            self.set_mock_popen_commands([
                ('git remote add upstream {}'.format(remote_slug), b'', b'', 0),
                ('git remote add all {}'.format(local_slug), b'', b'', 0),
                ('git remote add {} {}'.format(self.service.name, local_slug), b'', b'', 0),
                ('git version', b'git version 2.8.0', b'', 0),
                ('git pull --progress -v {} master'.format(self.service.name), b'', '\n'.join([
                    'POST git-upload-pack (140 bytes)',
                    'remote: Counting objects: 8318, done.',
                    'remote: Compressing objects: 100% (3/3), done.',
                    'remote: Total 8318 (delta 0), reused 0 (delta 0), pack-reused 8315',
                    'Receiving objects: 100% (8318/8318), 3.59 MiB | 974.00 KiB/s, done.',
                    'Resolving deltas: 100% (5126/5126), done.',
                    'From {}:{}/{}'.format(self.service.fqdn, local_namespace, repository),
                    ' * branch            master     -> FETCH_HEAD',
                    ' * [new branch]      master     -> {}/master'.format(self.service.name)]).encode('utf-8'),
                0)
            ])
            with self.recorder.use_cassette('_'.join(['test', self.service.name, cassette_name])):
                self.service.connect()
                self.service.fork(remote_namespace, repository)
                # emulate the outcome of the git actions
                self.service.repository.create_remote('upstream', url=remote_slug)
                self.service.repository.create_remote('all', url=local_slug)
                self.service.repository.create_remote(self.service.name, url=local_slug)

    def action_clone(self, cassette_name, namespace, repository):
        """Template test: clone namespace/repository via the service with
        git subprocesses mocked and HTTP replayed from the betamax cassette,
        then emulate the remotes the real clone would have created.
        """
        # hijack subprocess call
        with self.mockup_git(namespace, repository):
            local_slug = self.service.format_path(namespace=namespace, repository=repository, rw=True)
            self.set_mock_popen_commands([
                ('git remote add all {}'.format(local_slug), b'', b'', 0),
                ('git remote add {} {}'.format(self.service.name, local_slug), b'', b'', 0),
                ('git version', b'git version 2.8.0', b'', 0),
                ('git pull --progress -v {} master'.format(self.service.name), b'', '\n'.join([
                    'POST git-upload-pack (140 bytes)',
                    'remote: Counting objects: 8318, done.',
                    'remote: Compressing objects: 100% (3/3), done.',
                    'remote: Total 8318 (delta 0), reused 0 (delta 0), pack-reused 8315',
                    'Receiving objects: 100% (8318/8318), 3.59 MiB | 974.00 KiB/s, done.',
                    'Resolving deltas: 100% (5126/5126), done.',
                    'From {}:{}/{}'.format(self.service.fqdn, namespace, repository),
                    ' * branch            master     -> FETCH_HEAD',
                    ' * [new branch]      master     -> {}/master'.format(self.service.name)]).encode('utf-8'),
                0)
            ])
            with self.recorder.use_cassette('_'.join(['test', self.service.name, cassette_name])):
                self.service.connect()
                self.service.clone(namespace, repository)
                self.service.repository.create_remote('all', url=local_slug)
                self.service.repository.create_remote(self.service.name, url=local_slug)

    def action_create(self, cassette_name, namespace, repository):
        with self.recorder.use_cassette('_'.join(['test', self.service.name, cassette_name])):
            self.service.connect()
            self.service.create(namespace, repository, add=True)
            #
            self.assert_repository_exists(namespace, repository)
            self.assert_added_remote_defaults()

    def action_create__no_add(self, cassette_name, namespace, repository):
        with self.recorder.use_cassette('_'.join(['test', self.service.name, cassette_name])):
            self.service.connect()
            self.service.create(namespace, repository, add=False)
            #
            self.assert_repository_exists(namespace, repository)
            self.assert_added_remote_defaults()

    def action_delete(self, cassette_name, repository, namespace=None):
        with self.recorder.use_cassette('_'.join(['test', self.service.name, cassette_name])):
            self.service.connect()
            if namespace:
                self.service.delete(user=namespace, repo=repository)
            else:
                self.service.delete(repo=repository)
            #
            if not namespace:
                namespace = self.service.user
            self.assert_repository_not_exists(namespace, repository)

    def action_add(self, cassette_name, namespace, repository, alone=False, name=None, tracking='master'):
        """Template test: add namespace/repository as a remote and verify
        which remotes (and optionally which tracking branch) were set up,
        for every combination of `alone` x `name` x `tracking`.

        - alone=False adds the 'all' remote alongside the named/service one
        - name overrides the remote name (otherwise the service name is used)
        - tracking, when set, additionally checks the tracking branch
        """
        with self.recorder.use_cassette('_'.join(['test', self.service.name, cassette_name])):
            # init git in the repository's destination
            self.repository.init()
            self.service.connect()
            self.service.add(user=namespace, repo=repository, alone=alone, name=name, tracking=tracking)
            #
            if not tracking:
                if not alone and not name:
                    self.assert_added_remote_defaults()
                elif not alone and name:
                    self.assert_added_remote(name)
                    self.assert_added_remote('all')
                elif alone and not name:
                    self.assert_added_remote(self.service.name)
                elif alone and name:
                    self.assert_added_remote(name)
            else:
                # same remote expectations as above, plus tracking checks
                if not alone and not name:
                    self.assert_added_remote_defaults()
                    self.assert_tracking_remote()
                elif not alone and name:
                    self.assert_added_remote(name)
                    self.assert_added_remote('all')
                    self.assert_tracking_remote(name)
                elif alone and not name:
                    self.assert_added_remote(self.service.name)
                    self.assert_tracking_remote(branch_name=tracking)
                elif alone and name:
                    self.assert_added_remote(name)
                    self.assert_tracking_remote(name, tracking)

    def action_request_list(self, cassette_name, namespace, repository, rq_list_data=[]):
        with self.recorder.use_cassette('_'.join(['test', self.service.name, cassette_name])):
            self.service.connect()
            requests = list(self.service.request_list(user=namespace, repo=repository))
            for i, rq in enumerate(rq_list_data):
                assert requests[i] == rq

    def action_request_fetch(self, cassette_name, namespace, repository, request, pull=False, fail=False):
        """Template test: clone namespace/repository (read-only slug) and
        then fetch pull-request `request` into a local request/<id> branch,
        with git subprocesses mocked and HTTP replayed from the cassette.

        `fail=True` skips emulating the remotes, so the subsequent fetch is
        expected to error. NOTE(review): `pull` is currently unused here --
        confirm whether it should switch to a pull-based flow.
        """
        local_slug = self.service.format_path(namespace=namespace, repository=repository, rw=False)
        with self.recorder.use_cassette('_'.join(['test', self.service.name, cassette_name])):
            with self.mockup_git(namespace, repository):
                self.set_mock_popen_commands([
                    ('git remote add all {}'.format(local_slug), b'', b'', 0),
                    ('git remote add {} {}'.format(self.service.name, local_slug), b'', b'', 0),
                    ('git version', b'git version 2.8.0', b'', 0),
                    ('git pull --progress -v {} master'.format(self.service.name), b'', '\n'.join([
                        'POST git-upload-pack (140 bytes)',
                        'remote: Counting objects: 8318, done.',
                        'remote: Compressing objects: 100% (3/3), done.',
                        'remote: Total 8318 (delta 0), reused 0 (delta 0), pack-reused 8315',
                        'Receiving objects: 100% (8318/8318), 3.59 MiB | 974.00 KiB/s, done.',
                        'Resolving deltas: 100% (5126/5126), done.',
                        'From {}:{}/{}'.format(self.service.fqdn, namespace, repository),
                        ' * branch            master     -> FETCH_HEAD',
                        ' * [new branch]      master     -> {}/master'.format(self.service.name)]).encode('utf-8'),
                    0),
                    ('git version', b'git version 2.8.0', b'', 0),
                    ('git fetch --progress -v {0} pull/{1}/head:request/{1}'.format(self.service.name, request), b'', '\n'.join([
                        'POST git-upload-pack (140 bytes)',
                        'remote: Counting objects: 8318, done.',
                        'remote: Compressing objects: 100% (3/3), done.',
                        'remote: Total 8318 (delta 0), reused 0 (delta 0), pack-reused 8315',
                        'Receiving objects: 100% (8318/8318), 3.59 MiB | 974.00 KiB/s, done.',
                        'Resolving deltas: 100% (5126/5126), done.',
                        'From {}:{}/{}'.format(self.service.fqdn, namespace, repository),
                        ' * [new branch]      master     -> request/{}'.format(request)]).encode('utf-8'),
                    0)
                ])
                self.service.connect()
                self.service.clone(namespace, repository, rw=False)
            if not fail:
                # emulate the remotes the real clone would have created
                self.service.repository.create_remote('all', url=local_slug)
                self.service.repository.create_remote(self.service.name, url=local_slug)
            with self.mockup_git(namespace, repository):
                self.set_mock_popen_commands([
                    ('git version', b'git version 2.8.0', b'', 0),
                    ('git fetch --progress -v {0} pull/{1}/head:request/{1}'.format(self.service.name, request), b'', '\n'.join([
                        'POST git-upload-pack (140 bytes)',
                        'remote: Counting objects: 8318, done.',
                        'remote: Compressing objects: 100% (3/3), done.',
                        'remote: Total 8318 (delta 0), reused 0 (delta 0), pack-reused 8315',
                        'Receiving objects: 100% (8318/8318), 3.59 MiB | 974.00 KiB/s, done.',
                        'Resolving deltas: 100% (5126/5126), done.',
                        'From {}:{}/{}'.format(self.service.fqdn, namespace, repository),
                        ' * [new branch]      master     -> request/{}'.format(request)]).encode('utf-8'),
                    0)
                ])
                self.service.request_fetch(repository, namespace, request)

    def action_request_create(self, cassette_name,
            namespace, repository, branch,
            title, description,
            create_repository='test_create_requests',
            create_branch='pr-test'):
        '''
        Here we are testing the subcommand 'request create'.

        this test needs sensibly more preparation than other tests, because to create
        a pull request, you need:

        * a repository with commits on both the service and your workspace
        * a new branch with new commits, that has been pushed on the service

        So that's what we're doing below:
            * create a test project on the service,
            * populate the temporary git repository with it
            * create a commit and push it to the service as master
            * create a branch in the workspace
            * create a commit and push it to the service as pr-test

        Then we test the feature:
            * using the branch create a pull request and check the pull request is there

        Finally clean the remote repository

        So all the contextual work is only done
        '''
        cassette_name = '_'.join(['test', self.service.name, cassette_name])
        # Only touch the real service when we are recording a new cassette;
        # replaying an existing one needs no remote state.
        will_record = 'never' != self.recorder.config.default_cassette_options['record_mode'] \
                and not os.path.exists(os.path.join(self.recorder.config.cassette_library_dir, cassette_name+'.json'))

        @contextmanager
        def prepare_project_for_test():
            if will_record:
                self.service.connect()
                # let's create a project and add it to current repository
                self.service.create(namespace, create_repository, add=True)
                # make a modification, commit and push it
                with open(os.path.join(self.repository.working_dir, 'first_file'), 'w') as test:
                    test.write('he who makes a beast of himself gets rid of the pain of being a man. Dr Johnson')
                self.repository.git.add('first_file')
                self.repository.git.commit(message='First commit')
                self.repository.git.push(self.service.name, 'master')
                # create a new branch
                new_branch = self.repository.create_head(create_branch, 'HEAD')
                self.repository.head.reference = new_branch
                self.repository.head.reset(index=True, working_tree=True)
                # make a modification, commit and push it to that branch
                with open(os.path.join(self.repository.working_dir, 'second_file'), 'w') as test:
                    test.write('La meilleure façon de ne pas avancer est de suivre une idée fixe. J.Prévert')
                self.repository.git.add('second_file')
                self.repository.git.commit(message='Second commit')
                # Push to the remote named after the service under test.
                # (Previously hard-coded to 'github', which broke recording
                # against any other service; see the master push above.)
                self.repository.git.push(self.service.name, create_branch)
            yield
            if will_record:
                # Clean up the remote repository created for this recording.
                self.service.delete(create_repository)

        #self.service.repository = self.repository
        with prepare_project_for_test():
            with self.recorder.use_cassette(cassette_name):
                self.service.connect()
                request = self.service.request_create(
                        namespace,
                        repository,
                        branch,
                        title,
                        description
                )
                return request

    def action_gist_list(self, cassette_name, gist=None, gist_list_data=None):
        """Check that 'gist list' yields the expected items, in order.

        ``gist_list_data`` now defaults to ``None`` (normalized to an empty
        list) instead of the shared mutable default ``[]``.

        NOTE(review): ``gist`` is accepted but never forwarded to
        ``self.service.gist_list()`` — in the original, both branches of the
        ``if gist is None`` performed the exact same call, so they are merged
        here.  Confirm whether ``gist_list(gist)`` was intended when a gist
        id is supplied.
        """
        if gist_list_data is None:
            gist_list_data = []
        with self.recorder.use_cassette('_'.join(['test', self.service.name, cassette_name])):
            self.service.connect()
            listed = list(self.service.gist_list())
            for i, expected in enumerate(gist_list_data):
                assert listed[i] == expected

    def action_gist_clone(self, cassette_name, gist):
        """Run 'gist clone' against a mocked git transport and a cassette."""
        # Fabricated progress output for the mocked 'git pull' call.
        pull_stderr = b'\n'.join([
            b'POST git-upload-pack (140 bytes)',
            b'remote: Counting objects: 8318, done.',
            b'remote: Compressing objects: 100% (3/3), done.',
            b'remote: Total 8318 (delta 0), reused 0 (delta 0), pack-reused 8315',
            b'Receiving objects: 100% (8318/8318), 3.59 MiB | 974.00 KiB/s, done.',
            b'Resolving deltas: 100% (5126/5126), done.',
            bytes('From {}'.format(gist), 'utf-8'),
            b' * branch            master     -> FETCH_HEAD',
        ])
        cassette = '_'.join(['test', self.service.name, cassette_name])
        with self.mockup_git(None, None):
            self.set_mock_popen_commands([
                ('git version', b'git version 2.8.0', b'', 0),
                ('git remote add gist {}.git'.format(gist), b'', b'', 0),
                ('git pull --progress -v gist master', b'', pull_stderr, 0),
            ])
            with self.recorder.use_cassette(cassette):
                self.service.connect()
                self.service.gist_clone(gist)


    def action_gist_fetch(self, cassette_name, gist, gist_file=None):
        """Fetch a gist (or one of its files) inside a recorded session."""
        cassette = '_'.join(['test', self.service.name, cassette_name])
        with self.recorder.use_cassette(cassette):
            self.service.connect()
            return self.service.gist_fetch(gist, gist_file)

    def action_gist_create(self, cassette_name, description, gist_files, secret):
        """Create a gist inside a recorded session.

        Returns the created gist's content so callers can assert on it —
        previously the result was bound to an unused local and dropped,
        inconsistent with action_gist_fetch which does return its result.
        """
        with self.recorder.use_cassette('_'.join(['test', self.service.name, cassette_name])):
            self.service.connect()
            return self.service.gist_create(gist_files, description, secret)

    def action_gist_delete(self, cassette_name, gist):
        """Delete a gist inside a recorded session.

        Returns the service's response so callers can assert on it —
        previously the result was bound to an unused local and dropped,
        inconsistent with action_gist_fetch which does return its result.
        """
        with self.recorder.use_cassette('_'.join(['test', self.service.name, cassette_name])):
            self.service.connect()
            return self.service.gist_delete(gist)

    def action_open(self, cassette_name, namespace, repository):
        """Run 'open' with both Linux and macOS openers mocked out."""
        # Compute the target URL once; both platform openers get the same path.
        target = self.service.format_path(namespace=namespace, repository=repository)
        self.set_mock_popen_commands([
            ('xdg-open {}'.format(target), b'', b'', 0),
            ('open {}'.format(target), b'', b'', 0),
        ])
        with Replace('subprocess.Popen', self.Popen):
            self.service.open(user=namespace, repo=repository)
Example #51
0
class TestSlurmSingularityRunner(unittest.TestCase):
    """Exercise popper's SingularityRunner under the SLURM resource manager,
    with every subprocess invocation mocked via testfixtures' MockPopen."""

    def setUp(self):
        # Quiet popper's logger for the duration of each test.
        log.setLevel('CRITICAL')
        self.Popen = MockPopen()
        replacer = Replacer()
        # Intercept subprocess creation in the host runner so no real
        # processes (sbatch / tail / singularity) are ever spawned.
        replacer.replace('popper.runner_host.Popen', self.Popen)
        self.addCleanup(replacer.restore)

    def tearDown(self):
        # Restore the default logging level.
        log.setLevel('NOTSET')

    def test_create_cmd(self):
        """_create_cmd assembles the 'singularity run' invocation from the
        workspace dir, engine options, and the cached container image."""
        # Case 1: minimal config — only a workspace, no engine options.
        config = PopperConfig(workspace_dir='/w')
        config.wid = "abcd"
        with SingularityRunner(config=config) as sr:
            step = {'args': ['-two', '-flags']}
            sr._setup_singularity_cache()
            sr._container = os.path.join(sr._singularity_cache, 'c1.sif')
            cmd = sr._create_cmd(step, 'c1.sif')

            expected = (
                'singularity run'
                ' --userns --pwd /workspace'
                ' --bind /w:/workspace'
                f' {os.environ["HOME"]}/.cache/popper/singularity/abcd/c1.sif'
                ' -two -flags')

            self.assertEqual(expected, cmd)

        # Case 2: engine options (extra bind, hostname, ipc) must each be
        # translated into the matching singularity CLI flag.
        config_dict = {
            'engine': {
                'name': 'singularity',
                'options': {
                    'hostname': 'popper.local',
                    'ipc': True,
                    'bind': ['/path/in/host:/path/in/container']
                }
            },
            'resource_manager': {
                'name': 'slurm'
            }
        }

        config = PopperConfig(workspace_dir='/w', config_file=config_dict)
        config.wid = "abcd"

        with SingularityRunner(config=config) as sr:
            step = {'args': ['-two', '-flags']}
            sr._setup_singularity_cache()
            sr._container = os.path.join(sr._singularity_cache, 'c2.sif')
            cmd = sr._create_cmd(step, 'c2.sif')

            expected = (
                'singularity run --userns --pwd /workspace'
                ' --bind /w:/workspace'
                ' --bind /path/in/host:/path/in/container'
                ' --hostname popper.local'
                ' --ipc'
                f' {os.environ["HOME"]}/.cache/popper/singularity/abcd/c2.sif'
                ' -two -flags')

            self.assertEqual(expected, cmd)

    @replace('popper.runner_slurm.os.kill', mock_kill)
    def test_run(self, mock_kill):
        """End-to-end workflow run: popper should submit via 'sbatch --wait',
        stream the log with 'tail -f', and write the expected batch script."""
        # Register the two commands the SLURM runner is expected to execute.
        self.Popen.set_command(
            'sbatch --wait --job-name popper_1_123abc '
            '--output /tmp/popper/slurm/popper_1_123abc.out '
            '/tmp/popper/slurm/popper_1_123abc.sh',
            returncode=0)

        self.Popen.set_command('tail -f /tmp/popper/slurm/popper_1_123abc.out',
                               returncode=0)

        config_dict = {
            'engine': {
                'name': 'singularity',
                'options': {
                    'hostname': 'popper.local',
                    'bind': ['/path/in/host:/path/in/container']
                }
            },
            'resource_manager': {
                'name': 'slurm'
            }
        }

        config = PopperConfig(workspace_dir='/w', config_file=config_dict)
        config.wid = "123abc"

        with WorkflowRunner(config) as r:
            wf = YMLWorkflow("""
            version: '1'
            steps:
            - uses: 'popperized/bin/sh@master'
              runs: ls
            """)
            wf.parse()
            r.run(wf)

        # The runner writes the generated batch script to disk; verify its
        # contents match the configured singularity invocation exactly.
        with open('/tmp/popper/slurm/popper_1_123abc.sh', 'r') as f:
            content = f.read()
            self.assertEqual(
                content, f"""#!/bin/bash
singularity exec --userns --pwd /workspace --bind /w:/workspace --bind /path/in/host:/path/in/container --hostname popper.local {os.environ['HOME']}/.cache/popper/singularity/123abc/popper_1_123abc.sif ls"""
            )
Example #52
-1
 def test_communicate_with_stderr_redirected_check_stderr_is_none(self):
     """When stderr=STDOUT, communicate() merges stderr into stdout and
     reports err as None — MockPopen must mirror subprocess' behavior."""
     # setup
     Popen = MockPopen()
     Popen.set_command('a command', stdout=b'foo', stderr=b'bar')
     # usage
     process = Popen('a command', stdout=PIPE, stderr=STDOUT, shell=True)
     out, err = process.communicate()
     # test stderr is None
     compare(out, b'foobar')
     compare(err, None)