class Test_incorrect_invocation(TestCase):
    mainMsg = '''You have invoked this script as bbpgsql.
This script is supposed to be invoked through the commands archivepgsql
and archivewal.  Please check with your adminstrator to make sure these
commands were installed correctly.
'''
    unknownMsg = 'Unknown command: unknown\n'

    def setUp(self):
        self.tempdir = TempDirectory()
        self.config_dict = {
        }
        self.config_path = os.path.join(self.tempdir.path, 'config.ini')
        write_config_to_filename(self.config_dict, self.config_path)

    def tearDown(self):
        self.tempdir.cleanup()

    @patch('bbpgsql.bbpgsql_main.exit')
    def test_invocation_using_main_script_fails(self,
        mock_exit):
        bbpgsql_main(['bbpgsql', '-c', self.config_path])
        mock_exit.assert_called_with(1)

    @patch('bbpgsql.bbpgsql_main.stdout.write')
    @patch('bbpgsql.bbpgsql_main.exit')
    def test_invocation_using_unknown_fails(self,
        mock_exit, mock_stdout_write):
        bbpgsql_main(['unknown'])
        mock_stdout_write.assert_called_once_with(self.unknownMsg)
        mock_exit.assert_called_once_with(1)
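
A minimal sketch of the dispatch behaviour these tests exercise, assuming an entry point that routes on the name it was invoked as; the helper below is hypothetical and is not bbpgsql's actual bbpgsql_main.

# Hypothetical sketch (not bbpgsql's real code): dispatch on argv[0], matching the
# messages and exit code asserted above.
import os
from sys import exit, stdout

def demo_main(argv, commands=None):
    commands = commands or {}               # e.g. {'archivewal': ..., 'archivepgsql': ...}
    invoked_as = os.path.basename(argv[0])
    if invoked_as == 'bbpgsql':
        stdout.write('You have invoked this script as bbpgsql...\n')
        exit(1)
    elif invoked_as not in commands:
        stdout.write('Unknown command: %s\n' % invoked_as)
        exit(1)
    else:
        commands[invoked_as](argv[1:])       # delegate to the selected sub-command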
Example #2
class TestGetLanes(unittest.TestCase):
	
	def setUp(self):
		self.tempdir = TempDirectory() 
		self.tempdir.write('fake_file1.txt', b'some foo thing') 
		self.tempdir.write('fake_tmp_files/folder/afile.txt', b'the text')
		self.tempdir.write('fake_directory/fake_file2.txt', b'the text') 
		self.tempdir.write('fake_directory/afile.bam', b'the text') 
		self.tempdir.write('fake_directory/afile.sam', b'the text') 
		self.tempdir.write('fake_directory/afile.fastq.gz', b'the text') 
		self.tempdir.makedir('empty_directory') 
		self.tempdir_path = self.tempdir.path

	def tearDown(self):
		self.tempdir.cleanup()
		pass 
		
	def test_get_lanes_returns_data(self):
		data_list = bytes(str(self.tempdir.path + '/fake_tmp_files/folder/afile.txt\n'+self.tempdir.path + '/fake_directory/fake_file2.txt\n' + 'fake_path\n'),'ascii')
		with mock.patch('archive_project.GetLanes.check_output', return_value=data_list) as co:
			actual, message = get_lanes("study")
		co.assert_called_once_with(['pf', 'data', '-t', 'study', '-i', 'study'])
		expected = [str(self.tempdir.path + '/fake_tmp_files/folder/afile.txt'), str(self.tempdir.path + '/fake_directory/fake_file2.txt')]
		self.assertEqual(expected,actual)
		self.assertEqual('This path was returned by pf, but does not actually exist: fake_path\n', message)

	def test_get_lanes_returns_nodata(self):
		with mock.patch('archive_project.GetLanes.check_output', return_value=b'') as co:
			with mock.patch('os.path.exists', return_value=True) as pe:
				actual, message = get_lanes("fake_study")
		self.assertEqual([],actual)
		pe.assert_not_called()
		self.assertEqual('Unknown study or no data associated with study: fake_study\n',message)
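
The two tests above pin down the observable behaviour of get_lanes: it shells out to pf, keeps only the returned paths that exist on disk, and reports missing paths or an unknown study in the message. A rough sketch under those assumptions (the real archive_project.GetLanes.get_lanes may differ):

# Hypothetical sketch of get_lanes, reconstructed from the assertions above.
import os
from subprocess import check_output

def get_lanes(study):
    output = check_output(['pf', 'data', '-t', 'study', '-i', study])
    if not output:
        return [], 'Unknown study or no data associated with study: %s\n' % study
    lanes, message = [], ''
    for path in output.decode().splitlines():
        if os.path.exists(path):
            lanes.append(path)
        else:
            message += 'This path was returned by pf, but does not actually exist: %s\n' % path
    return lanes, message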
class MiscReportFuncTestSuite(unittest.TestCase):
    """spdxLicenseManager tag-value miscellaneous reporting FT suite."""
    def setUp(self):
        self.runner = CliRunner()
        setUpSandbox(self, slm.cli)
        runSandboxCommands(self, slm.cli)

        # set up temp directory for outputting reports
        self.reportDir = TempDirectory()

    def tearDown(self):
        self.reportDir.cleanup()
        self.reportDir = None
        tearDownSandbox(self)

    def test_cannot_request_an_invalid_report_format(self):
        # Edith asks for a report in a format that doesn't exist
        reportPath = self.reportDir.path + "/report.xlsx"
        result = runcmd(self, slm.cli, "frotz", "--subproject",
                        "frotz-nuclear", "create-report", "--scan_id", "1",
                        "--report_format", "blah", "--report_path", reportPath)

        # It fails and explains why
        self.assertEqual(1, result.exit_code)
        self.assertEqual(f"Unknown report format: blah\n", result.output)
class Test_archivewal_requires_WAL_file(TestCase):
    def setUp(self):
        self.tempdir = TempDirectory()
        self.config_dict = {
            'General': {
                'pgsql_data_directory': self.tempdir.path,
            },
        }
        self.config_file = os.path.join(self.tempdir.path, 'config_file')
        write_config_to_filename(self.config_dict, self.config_file)
        parser, self.options, self.args = archivewal_parse_args(['-c',
            self.config_file])

    def tearDown(self):
        self.tempdir.cleanup()

    def test_will_raise_exception_with_no_WAL_file(self):

        def will_raise_Exception():
            archivewal_validate_options_and_args(self.options, [])
        self.assertRaises(Exception, will_raise_Exception)

    def test_exception_is_explicit_about_error(self):
        try:
            archivewal_validate_options_and_args(self.options, [])
        except Exception as e:
            print('Exception', e)
            self.assertTrue('path to a WAL file' in str(e))
        else:
            self.fail('archivewal_validate_options_and_args did not raise')
Example #5
class WithTempDir(object):

    def setUp(self):
        self.dir = TempDirectory()

    def tearDown(self):
        self.dir.cleanup()
Example #6
class RunServerTests(unittest.TestCase):
    def setUp(self):
        self.temp_dir = TempDirectory()

    def tearDown(self):
        self.temp_dir.cleanup()

    def test_run_server(self):
        with mock.patch.dict(
            os.environ,
            {
                "LAN_SERVER_LOG_FILE": "logger.txt",
                "LOG_DIRECTORY": self.temp_dir.path,
                "LAN_PORT": "0000",
            },
        ):
            with LogCapture() as capture:
                mock_server = mock.MagicMock()
                mock_server.return_value.serve_forever = mock.MagicMock()
                mock_handler = mock.MagicMock()

                runServer(mock_server, mock_handler)

                capture.check(
                    ("LAN_SERVER_LOG_FILE", "INFO", "Starting server on port: 0"),
                    ("LAN_SERVER_LOG_FILE", "INFO", "Stopping\n"),
                )
Example #7
    def test_orders_stop(self, name, order_data, event_data, expected):
        tempdir = TempDirectory()
        try:
            data = order_data
            data['sid'] = self.ASSET133

            order = Order(**data)

            assets = {
                133: pd.DataFrame({
                    "open": [event_data["open"]],
                    "high": [event_data["high"]],
                    "low": [event_data["low"]],
                    "close": [event_data["close"]],
                    "volume": [event_data["volume"]],
                    "dt": [pd.Timestamp('2006-01-05 14:31', tz='UTC')]
                }).set_index("dt")
            }

            write_bcolz_minute_data(
                self.env,
                pd.date_range(
                    start=normalize_date(self.minutes[0]),
                    end=normalize_date(self.minutes[-1])
                ),
                tempdir.path,
                assets
            )

            equity_minute_reader = BcolzMinuteBarReader(tempdir.path)

            data_portal = DataPortal(
                self.env,
                equity_minute_reader=equity_minute_reader,
            )

            slippage_model = VolumeShareSlippage()

            try:
                dt = pd.Timestamp('2006-01-05 14:31', tz='UTC')
                bar_data = BarData(data_portal,
                                   lambda: dt,
                                   'minute')
                _, txn = next(slippage_model.simulate(
                    bar_data,
                    self.ASSET133,
                    [order],
                ))
            except StopIteration:
                txn = None

            if expected['transaction'] is None:
                self.assertIsNone(txn)
            else:
                self.assertIsNotNone(txn)

                for key, value in expected['transaction'].items():
                    self.assertEquals(value, txn[key])
        finally:
            tempdir.cleanup()
Example #8
    def test_replace_file_contents(self):
        """ファイルの文字列を書き換える関数のテスト。"""
        # ファイルやディレクトリのテストでは、まず以下インスタンスを生成する。
        temp_dir = TempDirectory()

        # writeメソッドで、ファイル作成できる(以下のように、存在しないディレクトリも再帰的に作ってくれる)。
        # 一点注意すべきなのが、デフォルトだとbyte文字列しか書き込みできないこと。
        # 通常の文字列を書き込みたい場合、エンコーディングの指定が必須。
        # http://testfixtures.readthedocs.io/en/latest/api.html
        temp_file = temp_dir.write('foo/bar/sample.txt',
                                   'I love cat.\nMy cat\'s name is mike.\n',
                                   encoding='utf-8')

        # テストケース実施
        replace_file_contents(temp_dir.path + '/foo/bar', 'sample.txt', 'cat',
                              'dog')

        # ファイルの結果を確認する。
        self.assertEqual(
            'I love dog.\nMy dog\'s name is mike.\n',
            # readメソッド呼び出し時にも、エンコーディングの指定が要るので注意(省略した場合はバイト文字列)。
            temp_dir.read('foo/bar/sample.txt', encoding='utf-8'))

        # 以下を呼ぶことで、ディレクトリ以下の内容を再帰的にcleanしてくれる。
        temp_dir.cleanup()
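
For context, the function under test could be implemented roughly as below; this is only a sketch inferred from the assertions above, not the project's actual replace_file_contents.

# Hypothetical sketch of replace_file_contents(directory, filename, old, new).
import os

def replace_file_contents(directory, filename, old, new):
    path = os.path.join(directory, filename)
    with open(path, encoding='utf-8') as f:
        text = f.read()
    with open(path, 'w', encoding='utf-8') as f:
        f.write(text.replace(old, new))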
class Test_SnapshotArchive_Repository(TestCase):
    def setUp(self):
        store = MemoryCommitStorage()
        self.repo = BBRepository(store)
        self.tempdir = TempDirectory()
        self.setup_archive_a_snapshot()

    def setup_archive_a_snapshot(self):
        archive_name = 'somearchive.tgz'
        self.archive_contents = '123'
        self.archive_path = self.tempdir.write(archive_name,
            self.archive_contents)
        self.tag = generate_tag()
        self.first_WAL = '01234'
        self.last_WAL = '45678'
        commit_snapshot_to_repository(self.repo, self.archive_path, self.tag,
            self.first_WAL, self.last_WAL)

    def tearDown(self):
        self.tempdir.cleanup()

    def test_can_retrieve_snapshot_contents_with_tag(self):
        commit = [i for i in self.repo][-1]
        restore_path = self.tempdir.getpath('restorearchive.tgz')
        commit.get_contents_to_filename(restore_path)
        self.assertEqual(self.archive_contents,
            open(restore_path, 'rb').read())

    def test_get_first_WAL_file_for_archived_snapshot_with_tag(self):
        self.assertEqual(self.first_WAL, get_first_WAL(self.repo, self.tag))

    def test_get_last_WAL_file_for_archived_snapshot_with_tag(self):
        self.assertEqual(self.last_WAL, get_last_WAL(self.repo, self.tag))
Example #10
    def test_atexit(self):
        # http://bugs.python.org/issue25532
        from testfixtures.mock import call

        m = Mock()
        with Replacer() as r:
            # make sure the marker is false, other tests will
            # probably have set it
            r.replace('testfixtures.TempDirectory.atexit_setup', False)
            r.replace('atexit.register', m.register)

            d = TempDirectory()

            expected = [call.register(d.atexit)]

            compare(expected, m.mock_calls)

            with catch_warnings(record=True) as w:
                d.atexit()
                self.assertEqual(len(w), 1)
                compare(str(w[0].message), (  # pragma: no branch
                    "TempDirectory instances not cleaned up by shutdown:\n" +
                    d.path
                    ))

            d.cleanup()

            compare(set(), TempDirectory.instances)

            # check re-running has no ill effects
            d.atexit()
class TestConfig(TestCase):
    def setUp(self):
        self.dir = TempDirectory()

    def tearDown(self):
        self.dir.cleanup()

    def test_table_in_multiple_sources(self):
        m1 = MetaData()
        t1 = Table('table', m1)
        m2 = MetaData()
        t2 = Table('table', m2)

        with ShouldRaise(
                ValueError("Tables present in more than one Source: table")):
            Config(
                Source(t1),
                Source(t2),
            )

    def test_table_excludes(self):
        m1 = MetaData()
        t1 = Table('t1', m1)
        s1 = Source(t1)
        m2 = MetaData()
        t2 = Table('t2', m2)
        s2 = Source(t2)

        c = Config(s1, s2)

        compare({'t2'}, c.excludes[s1])
        compare({'t1'}, c.excludes[s2])
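
These two tests fix the contract: a table name may live in only one Source, and each Source's excludes set is the union of the table names owned by the other Sources. A minimal sketch of that bookkeeping, assuming each Source exposes its tables via source.metadata.tables (hypothetical, not mortar_rdb's actual Config):

# Hypothetical sketch of the duplicate check and excludes computation tested above.
class DemoConfig(object):
    def __init__(self, *sources):
        owner_by_table = {}
        for source in sources:
            for name in source.metadata.tables:
                if name in owner_by_table:
                    raise ValueError(
                        'Tables present in more than one Source: %s' % name)
                owner_by_table[name] = source
        # each source excludes every table owned by the other sources
        self.excludes = {
            source: {n for n, owner in owner_by_table.items() if owner is not source}
            for source in sources
        }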
Example #12
 def test_cleanup(self):
     d = TempDirectory()
     p = d.path
     assert os.path.exists(p) is True
     p = d.write('something', b'stuff')
     d.cleanup()
     assert os.path.exists(p) is False
class Test_archivewal_parse_args_returns_parser_options_args(TestCase):
    def setUp(self):
        self.tempdir = TempDirectory()
        self.config_dict = {
            'General': {
                'pgsql_data_directory': self.tempdir.path,
            },
        }
        self.config_file = os.path.join(self.tempdir.path, 'config_file')
        write_config_to_filename(self.config_dict, self.config_file)

    def tearDown(self):
        self.tempdir.cleanup()

    def test_archivewal_parse_args_returns_three_items(self):
        item1, item2, item3 = archivewal_parse_args(args=['walfilename'])
        self.assertNotEqual(type(item1), type(None))
        self.assertNotEqual(type(item2), type(None))
        self.assertNotEqual(type(item3), type(None))

    def test_archivewal_parse_args_returns_parser(self):
        parser, item2, item3 = archivewal_parse_args(args=['walfilename'])
        self.assertTrue(isinstance(parser, OptionParser))

    def test_archivewal_parse_args_returns_options(self):
        item1, options, item3 = archivewal_parse_args(args=['walfilename'])
        self.assertTrue(isinstance(options, object))

    def test_archivewal_parse_args_returns_args(self):
        item1, item2, args = archivewal_parse_args(args=['walfilename'])
        self.assertEqual(type(args), type([]))
Example #14
class TestConfig(TestCase):

    def setUp(self):
        self.dir = TempDirectory()

    def tearDown(self):
        self.dir.cleanup()
        
    def test_table_in_multiple_sources(self):
        m1 = MetaData()
        t1 = Table('table', m1)
        m2 = MetaData()
        t2 = Table('table', m2)

        with ShouldRaise(
            ValueError("Tables present in more than one Source: table")
            ):
            Config(
                Source(t1),
                Source(t2),
                )

    def test_table_excludes(self):
        m1 = MetaData()
        t1 = Table('t1', m1)
        s1 = Source(t1)
        m2 = MetaData()
        t2 = Table('t2', m2)
        s2 = Source(t2)
        
        c = Config(s1, s2)

        compare({'t2'}, c.excludes[s1])
        compare({'t1'}, c.excludes[s2])
class TestConceptTrainer(TestCase):
    def setUp(self):
        ce = dsconcept.model.ConceptExtractor()
        fe = dsconcept.model.FeatureExtractor()
        self.d = TempDirectory()
        data = b'{"abstract":["Astronauts are very cool."], "concept": ["ASTRONAUTS", "COOL THINGS"]}\n {"abstract":["NASA is going to Mars."], "concept":["NASA", "MARS"]}'
        self.d.write("test.json", data)
        self.corpus_path = f"{self.d.path}/test.json"
        s = 100
        self.X = csc_matrix(
            np.random.randint(2, size=s * 2).reshape(int(s), 2))
        self.y = np.random.randint(2, size=s)
        paramgrid = {
            "alpha": [0.01, 0.001, 0.0001],
            "class_weight": [{
                1: 10,
                0: 1
            }, {
                1: 5,
                0: 1
            }, {
                1: 20,
                0: 1
            }],
            "max_iter": [1],
            "loss": ["log"],
        }  # requires loss function with predict_proba
        clf = GridSearchCV(SGDClassifier(), paramgrid,
                           scoring="f1")  # requires GridSearchCV
        self.ct = dsconcept.train.ConceptTrainer(ce, clf)

    def test_create_concept_classifier(self):
        out_dir = Path(f"{self.d.path}/models")
        out_dir.mkdir()
        X_train, X_test, y_train, y_test = train_test_split(self.X,
                                                            self.y,
                                                            test_size=0.5,
                                                            random_state=42)
        self.ct.create_concept_classifier("test_concept", X_train, X_test,
                                          y_train, y_test, out_dir)
        clf = joblib.load(out_dir / "test_concept.pkl")
        LOG.info(clf)

    def test_train_all(
            self):  # This test is super naive. Does not check behaviour.
        self.ct.train_all(self.X, Path(f"{self.d.path}/models"), 5)
        test_inds = np.load((Path(f"{self.d.path}") / "test_inds.npy"))
        train_inds = np.load((Path(f"{self.d.path}") / "train_inds.npy"))
        LOG.info(f"test_inds: {test_inds}")
        LOG.info(f"train_inds: {train_inds}")

    @given(arrays(dtype=np.float_, shape=1))
    def test_get_dispersed_subset(self, array):
        subset = dsconcept.train.get_dispersed_subset(array, 5)
        self.assertLessEqual(len(subset), 5)
        LOG.info(subset)

    def tearDown(self):
        self.d.cleanup()
Example #16
class Test_archivepgsql_backup_invocation(TestCase):
    ARCHIVEPGSQL_PATH = os.path.join('bbpgsql', 'cmdline_scripts')
    CONFIG_FILE = 'config.ini'
    exe_script = 'archivepgsql'

    def setUp(self):
        self.setup_environment()
        self.setup_config()
        self.execution_sequence = 0

    def setup_environment(self):
        self.env = deepcopy(os.environ)
        self.env['PATH'] = ''.join([
            self.env['PATH'],
            ':',
            self.ARCHIVEPGSQL_PATH])
        self.tempdir = TempDirectory()
        self.data_dir = self.tempdir.makedir('pgsql_data')
        self.archive_dir = self.tempdir.makedir('pgsql_archive')

    def setup_config(self):
        self.config_path = os.path.join(self.tempdir.path, self.CONFIG_FILE)
        self.config_dict = {
            'General': {
                'pgsql_data_directory': self.data_dir,
            },
            'Snapshot': {
                'driver': 'memory',
            },
        }
        write_config_to_filename(self.config_dict, self.config_path)
        self.config = get_config_from_filename_and_set_up_logging(
            self.config_path
        )

    def tearDown(self):
        self.tempdir.cleanup()

    @patch('bbpgsql.archive_pgsql.commit_snapshot_to_repository')
    @patch('bbpgsql.archive_pgsql.create_archive')
    @patch('bbpgsql.archive_pgsql.pg_stop_backup')
    @patch('bbpgsql.archive_pgsql.pg_start_backup')
    def test_perform_backup(self, mock_pg_start_backup, mock_pg_stop_backup,
        mock_create_archive, mock_commit_snapshot_to_repository):
        first_WAL = '000000D0'
        second_WAL = '000000D1'
        mock_pg_start_backup.return_value = first_WAL
        mock_pg_stop_backup.return_value = second_WAL
        archiveFile = os.path.join(self.archive_dir, 'pgsql.snapshot.tar')
        tag = bbpgsql.archive_pgsql.generate_tag()
        repo = get_Snapshot_repository(self.config)
        bbpgsql.archive_pgsql.perform_backup(self.data_dir,
            archiveFile, tag, repo)
        mock_pg_start_backup.assert_called_once_with(tag)
        mock_create_archive.assert_called_once_with(self.data_dir, archiveFile)
        self.assertEqual(mock_pg_stop_backup.called, True)
        self.assertEqual(mock_pg_stop_backup.call_count, 1)
        mock_commit_snapshot_to_repository.assert_called_once_with(
            repo, archiveFile, tag, first_WAL, second_WAL)
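
The patched helpers above fully describe the expected sequence: start the backup to learn the first WAL segment, archive the data directory, stop the backup to learn the last WAL segment, then commit the snapshot. A hypothetical sketch of perform_backup consistent with those assertions (the helper names are the module-level functions patched in the test):

# Hypothetical sketch of bbpgsql.archive_pgsql.perform_backup, inferred from the mocks above.
def perform_backup(data_directory, archive_file, tag, repository):
    first_WAL = pg_start_backup(tag)               # returns the first WAL segment name
    create_archive(data_directory, archive_file)   # tar up the data directory
    last_WAL = pg_stop_backup()                    # returns the last WAL segment name
    commit_snapshot_to_repository(
        repository, archive_file, tag, first_WAL, last_WAL)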
Example #17
class TestFileDownload(unittest.TestCase):

    def setUp(self):
        self.tempdir = TempDirectory()
        self.tempdir.write('1/Accession1.fastq.gz', b'the text')
        self.tempdir.write('2/Accession1_1.fastq.gz', b'the text')
        self.tempdir.write('2/Accession1_2.fastq.gz',b'the text')
        self.tempdir_path = self.tempdir.path
        print('temp',self.tempdir_path)
        self.under_test1 = Preparation.new_instance(Spreadsheet.new_instance("MyStudy", [
            RawRead(forward_read='Accession1', reverse_read='T', sample_name='SAMPLE1',
                    taxon_id='1280', library_name='LIB1', sample_accession=None),
            RawRead(forward_read='Accession2', reverse_read='T', sample_name='SAMPLE2',
                    taxon_id='1280', library_name='LIB2', sample_accession=None)]), self.tempdir_path, 0, 0)
        self.under_test2 = Preparation.new_instance(Spreadsheet.new_instance("MyStudy", [
            RawRead(forward_read='Accession1', reverse_read='T', sample_name='SAMPLE1',
                    taxon_id='1280', library_name='LIB1', sample_accession=None),
            RawRead(forward_read='Accession2', reverse_read='T', sample_name='SAMPLE2',
                    taxon_id='1280', library_name='LIB2', sample_accession=None)]), self.tempdir_path, 1, 0)
        self.under_test3 = Preparation.new_instance(Spreadsheet.new_instance("MyStudy", [
            RawRead(forward_read='Accession1', reverse_read='T', sample_name='SAMPLE1',
                    taxon_id='1280', library_name='LIB1', sample_accession=None),
            RawRead(forward_read='Accession2', reverse_read='T', sample_name='SAMPLE2',
                    taxon_id='1280', library_name='LIB2', sample_accession=None)]), self.tempdir_path, 2, 0)

    def tearDown(self):
        self.tempdir.cleanup()
        pass

    def test_ENA_download_calls_create_commands_correctly(self):
        connections=1
        with patch("importer.writer.Preparation.create_dataframe_from_list", return_value = 'df') as mock_create_dataframe_from_list:
            with patch("importer.writer.Preparation.check_if_file_downloaded", return_value =False) as mock_check_if_file_downloaded:
                with patch("importer.writer.create_commands", return_value='df') as mock_create_commands:
                    with patch("importer.writer.submit_commands") as mock_submit_commands:
                       self.under_test1.download_files_from_ena(connections)
        mock_create_dataframe_from_list.assert_called_once_with(['Accession1','Accession2'])
        mock_create_commands.assert_called_once_with('df', 1, self.tempdir_path+'/0')
        mock_submit_commands.assert_called_once_with('df')

    def test_create_dataframe_from_list(self):
        reads = ['Accession1','Accession2']
        actual=self.under_test1.create_dataframe_from_list(reads)
        expected = pd.DataFrame(([read, 'import_%s' % read] for read in reads),
                          columns=('Read accession', 'Job_name'))
        pd.testing.assert_frame_equal(actual,expected)

    def test_check_if_file_downloaded_no_files(self):
        actual=self.under_test1.check_if_file_downloaded('Accession1')
        self.assertEqual(actual,False)

    def test_check_if_file_downloaded_single_ended_exists(self):
        actual=self.under_test2.check_if_file_downloaded('Accession1')
        self.assertEqual(actual,True)

    def test_check_if_file_downloaded_double_ended_exists(self):
        actual=self.under_test3.check_if_file_downloaded('Accession1')
        self.assertEqual(actual,True)
Example #18
class test_task(unittest.TestCase):
    def setUp(self):
        self.dir = TempDirectory()

    def tearDown(self):
        self.dir.cleanup()

    def _run_task(self, parameters, proc1, outputnxprocess=True):
        if proc1:
            previoustask = utils.nxpathtotask(proc1)
            self.assertTrue(previoustask.done)
        else:
            previoustask = None

        # Check run
        task2 = utils.create_task(dependencies=previoustask, **parameters)
        self.assertFalse(task2.done)
        task2.run()
        self.assertTrue(task2.done)
        if outputnxprocess:
            checksum = task2.checksum
            self.assertEqual(checksum, task2.output.checksum)
            self.assertTrue(task2.output.valid_checksum())
        proc2 = task2.output

        # Check re-run (same task instance)
        task2.run()
        proc3 = task2.output
        self.assertEqual(proc2, proc3)

        # Check re-run (new task instance)
        task3 = utils.create_task(dependencies=previoustask, **parameters)
        self.assertTrue(task3.done)
        task3.run()
        proc3 = task3.output
        self.assertEqual(proc2, proc3)

        # Check re-run (reconstructed task instance)
        if outputnxprocess:
            task4 = utils.nxpathtotask(proc2)
            self.assertEqual(type(task4), type(task2))
            self.assertTrue(task4.done)
            self.assertEqual(checksum, task4.checksum)
            self.assertEqual(checksum, task4.output.checksum)
            self.assertTrue(task4.output.valid_checksum())

            # Check re-run (new task instance)
            task4.run()
            proc3 = task4.output
            self.assertEqual(proc2, proc3)

        return proc2

    def _check_reproc(self, proc1, proc2):
        self.assertNotEqual(proc1, proc2)
        self.assertEqual(proc1.name.split(".")[-1], "1")
        self.assertEqual(proc2.name.split(".")[-1], "2")
class TestFeatureExtractor(TestCase):
    def setUp(self):
        self.fe = FeatureExtractor()
        self.d = TempDirectory()
        data = b'{"abstract":"Astronauts are very cool.", "concept": ["ASTRONAUTS", "COOL THINGS"]} \n {"abstract":"NASA is going to Mars.", "concept":["NASA", "MARS"]}'
        self.d.write("test.json", data)
        self.corpus_path = f"{self.d.path}/test.json"

    @given(features())
    def test_features(self, d):
        self.fe.features = d
        self.assertEqual(len(self.fe.features), len(d))

    def test_from_corpus_to_jsonlines(self):
        self.fe.from_corpus_to_jsonlines(
            self.corpus_path,
            f"{self.d.path}/features.jsonl",
            "abstract",
        )

    def test_from_jsonlines(self):
        data = b'{"astronaut":"NOUN", "space": "NOUN", "NASA": "ENT"}\n{"Mars": "PROPN", "dog": "NOUN"}'
        features_out = "features.jsonl"
        self.d.write(features_out, data)
        self.fe.from_jsonlines(f"{self.d.path}/{features_out}")
        self.assertSetEqual(self.fe.term_types, {"NOUN", "PROPN", "ENT"})

    def test_to_jsonlines(self):
        self.fe.features = [
            {
                "space": "NOUN",
                "Mars": "PROPN"
            },
            {
                "Anita": "PROPN",
                "Adams": "PROPN"
            },
        ]
        out_features = "features.jsonl"
        self.fe.to_jsonlines(f"{self.d.path}/{out_features}")

    @given(features(), weights())
    def test_weight_terms(self, d, w):
        self.fe.features = d
        self.fe.weight_terms(w)

    @given(features(), weights())
    def test_limit_features(self, d, w):
        self.fe.features = d
        weighted_features = self.fe.weight_terms(
            w)  # Test method contingent upon another test. Bad?
        self.fe.limit_features(weighted_features,
                               feature_min=1,
                               feature_max=0.90)

    def tearDown(self):
        self.d.cleanup()
Example #20
class MailTestCaseMixin(TestCase):
    def _pre_setup(self):
        super(MailTestCaseMixin, self)._pre_setup()
        self.tempdir = TempDirectory()
        self.settings_override = override_settings(
            MEDIA_ROOT=self.tempdir.path,
            EMAIL_BACKEND=u'poleno.mail.backend.EmailBackend',
        )
        self.settings_override.enable()

    def _post_teardown(self):
        self.settings_override.disable()
        self.tempdir.cleanup()
        super(MailTestCaseMixin, self)._post_teardown()

    def _call_with_defaults(self, func, kwargs, defaults):
        omit = kwargs.pop(u'omit', [])
        defaults.update(kwargs)
        for key in omit:
            defaults.pop(key, None)
        return func(**defaults)

    def _create_attachment(self, **kwargs):
        content = kwargs.pop(u'content', u'Default Testing Content')
        return self._call_with_defaults(
            Attachment.objects.create, kwargs, {
                u'file': ContentFile(content, name=u'overriden-file-name.bin'),
                u'name': u'default_testing_filename.txt',
                u'content_type': u'text/plain',
            })

    def _create_recipient(self, **kwargs):
        return self._call_with_defaults(
            Recipient.objects.create, kwargs, {
                u'name': u'Default Testing Name',
                u'mail': u'*****@*****.**',
                u'type': Recipient.TYPES.TO,
                u'status': Recipient.STATUSES.INBOUND,
                u'status_details': u'',
                u'remote_id': u'',
            })

    def _create_message(self, **kwargs):
        return self._call_with_defaults(
            Message.objects.create, kwargs, {
                u'type': Message.TYPES.INBOUND,
                u'processed': utc_now(),
                u'from_name': u'Default Testing From Name',
                u'from_mail': u'*****@*****.**',
                u'received_for': u'*****@*****.**',
                u'subject': u'Default Testing Subject',
                u'text': u'Default Testing Text Content',
                u'html': u'<p>Default Testing HTML Content</p>',
                u'headers': {
                    'X-Default-Testing-Extra-Header': 'Default Testing Value'
                },
            })
Example #21
class test_next_output_stem(unittest.TestCase):
    """tests for next_output_stem"""

    #next_output_stem(prefix, path=None, start=1, inc=1, zfill=3,
    #       overwrite=False)

    def setUp(self):
        self.tempdir = TempDirectory()
        self.tempdir.write('a_004', b'some text a4')
        self.tempdir.write('a_005', b'some text a5')
        self.tempdir.write('b_002.txt', b'some text b2')
        self.tempdir.write('b_008.out', b'some text b8')
        self.tempdir.write(('c_010', 'por'), b'some text c5por')

    def tearDown(self):
        self.tempdir.cleanup()


#    @with_setup(setup=self.setup, teardown=self.teardown)

    def test_file(self):
        assert_equal(next_output_stem(prefix='a_', path=self.tempdir.path),
                     'a_006')

    def test_file2(self):
        assert_equal(next_output_stem(prefix='b_', path=self.tempdir.path),
                     'b_009')

    def test_directory(self):
        assert_equal(next_output_stem(prefix='c_', path=self.tempdir.path),
                     'c_011')

    def test_file_overwrite(self):
        assert_equal(
            next_output_stem(prefix='a_',
                             path=self.tempdir.path,
                             overwrite=True), 'a_005')

    def test_inc(self):
        assert_equal(
            next_output_stem(prefix='a_', path=self.tempdir.path, inc=3),
            'a_008')

    def test_zfill(self):
        assert_equal(
            next_output_stem(prefix='a_', path=self.tempdir.path, zfill=5),
            'a_00006')

    def test_does_not_exist(self):
        assert_equal(next_output_stem(prefix='g_', path=self.tempdir.path),
                     'g_001')

    def test_does_not_exist_with_start(self):
        assert_equal(
            next_output_stem(prefix='g_', path=self.tempdir.path, start=4),
            'g_004')
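
Taken together, these assertions describe the contract: scan path for entries that begin with prefix, read the numeric suffix (ignoring any file extension), and return the prefix plus the next number, zero-filled. A sketch consistent with the tests above (the real next_output_stem may handle more cases):

# Hypothetical sketch of next_output_stem, reconstructed from the tests above.
import os

def next_output_stem(prefix, path=None, start=1, inc=1, zfill=3, overwrite=False):
    path = path or os.getcwd()
    numbers = []
    for entry in os.listdir(path):
        if not entry.startswith(prefix):
            continue
        stem = os.path.splitext(entry)[0]          # 'b_008.out' -> 'b_008'
        suffix = stem[len(prefix):]
        if suffix.isdigit():
            numbers.append(int(suffix))
    if not numbers:
        number = start
    elif overwrite:
        number = max(numbers)                      # reuse the most recent stem
    else:
        number = max(numbers) + inc
    return prefix + str(number).zfill(zfill)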
 def test_atexit(self):
     d = TempDirectory()
     self.assertTrue(TempDirectory.atexit in [t[0] for t in atexit._exithandlers])
     with catch_warnings(record=True) as w:
         d.atexit()
         self.assertEqual(len(w), 1)
         compare(str(w[0].message), ("TempDirectory instances not cleaned up by shutdown:\n" + d.path))
     d.cleanup()
     # call again to make sure nothing blows up:
     d.atexit()
Example #23
class TestFixtureLoad:
    def setUp(self):
        self.d = TempDirectory()
        fixtureload.set_source_dir(self.d.path)
        self.d.write(
            'test.json',
            json.dumps({
                'description': {
                    'samples': {
                        'fixture0': {
                            'a': 'b'
                        },
                        'fixture1': {
                            'c': 'd'
                        }
                    }
                }
            }))
        self.d.write('test_corrupted.json', 'corrupted json')

    def tearDown(self):
        self.d.cleanup()

    def test_can_set_fixture_source_directory(self):
        fixtureload.set_source_dir('/tmp')
        assert fixtureload.fixture_source_dir == '/tmp'

    def test_can_load_fixture(self):
        fixture = fixtureload.load('test/description/fixture0')
        compare(fixture, {'a': 'b'})
        fixture = fixtureload.load('test/description/fixture1')
        compare(fixture, {'c': 'd'})

    def test_load_fixture_with_not_existed_file_should_raise(self):
        with ShouldRaise(IOError):
            fixtureload.load('not_existed/description/fixture0')

    def test_load_fixture_with_corrupted_file_should_raise(self):
        with ShouldRaise(ValueError):
            fixtureload.load('test_corrupted/description/fixture0')

    def test_parse_fixture_path(self):
        path = fixtureload.parse_fixture_path('test/desc/fixture0')
        compare(
            path, {
                'source_file': 'test.json',
                'fixture_desc': 'desc',
                'fixture': 'fixture0'
            })

    def test_parse_fixture_path_if_path_is_not_valid(self):
        with ShouldRaise(
                ValueError('Fixture Path is not valid (testfixture0)')):
            fixtureload.parse_fixture_path('testfixture0')
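
From the expectations above, a minimal sketch of the fixtureload helpers might look like the following; the module-level state and error wording are taken from the assertions, everything else is an assumption.

# Hypothetical sketch of fixtureload.set_source_dir, parse_fixture_path and load.
import json
import os

fixture_source_dir = None

def set_source_dir(path):
    global fixture_source_dir
    fixture_source_dir = path

def parse_fixture_path(path):
    parts = path.split('/')
    if len(parts) != 3:
        raise ValueError('Fixture Path is not valid (%s)' % path)
    source, desc, fixture = parts
    return {'source_file': source + '.json',
            'fixture_desc': desc,
            'fixture': fixture}

def load(path):
    parsed = parse_fixture_path(path)
    with open(os.path.join(fixture_source_dir, parsed['source_file'])) as f:
        data = json.load(f)        # raises ValueError for corrupted JSON
    return data[parsed['fixture_desc']]['samples'][parsed['fixture']]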
Example #24
class MailTestCaseMixin(TestCase):

    def _pre_setup(self):
        super(MailTestCaseMixin, self)._pre_setup()
        self.tempdir = TempDirectory()
        self.settings_override = override_settings(
            MEDIA_ROOT=self.tempdir.path,
            EMAIL_BACKEND=u'poleno.mail.backend.EmailBackend',
            )
        self.settings_override.enable()

    def _post_teardown(self):
        self.settings_override.disable()
        self.tempdir.cleanup()
        super(MailTestCaseMixin, self)._post_teardown()


    def _call_with_defaults(self, func, kwargs, defaults):
        omit = kwargs.pop(u'omit', [])
        defaults.update(kwargs)
        for key in omit:
            defaults.pop(key, None)
        return func(**defaults)

    def _create_attachment(self, **kwargs):
        content = kwargs.pop(u'content', u'Default Testing Content')
        return self._call_with_defaults(Attachment.objects.create, kwargs, {
            u'file': ContentFile(content, name=u'overriden-file-name.bin'),
            u'name': u'default_testing_filename.txt',
            u'content_type': u'text/plain',
            })

    def _create_recipient(self, **kwargs):
        return self._call_with_defaults(Recipient.objects.create, kwargs, {
            u'name': u'Default Testing Name',
            u'mail': u'*****@*****.**',
            u'type': Recipient.TYPES.TO,
            u'status': Recipient.STATUSES.INBOUND,
            u'status_details': u'',
            u'remote_id': u'',
            })

    def _create_message(self, **kwargs):
        return self._call_with_defaults(Message.objects.create, kwargs, {
            u'type': Message.TYPES.INBOUND,
            u'processed': utc_now(),
            u'from_name': u'Default Testing From Name',
            u'from_mail': u'*****@*****.**',
            u'received_for': u'*****@*****.**',
            u'subject': u'Default Testing Subject',
            u'text': u'Default Testing Text Content',
            u'html': u'<p>Default Testing HTML Content</p>',
            u'headers': {'X-Default-Testing-Extra-Header': 'Default Testing Value'},
            })
class Packager(TestCase):

    def setUp(self):

        self.log = logging.getLogger("testlog")
        logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)

        # create temporary dir
        self.temp_dir = TempDirectory()
        self.temp_dir_name = self.temp_dir.path
        self.input_path = Path(self.temp_dir_name, "repo/inputs")
        self.data_path = Path(self.input_path, "CPER/2019/05")
        self.out_path = Path(self.temp_dir_name, "outputs")
        self.output_path = Path(self.out_path, "CPER/2019/05")
        self.data_file_1 = Path(self.data_path, '24/NEON.D10.CPER.DP1.00041.001.001.501.001.ST_1_minute.2019-05-24.basic.csv')
        self.data_file_2 = Path(self.data_path, '25/NEON.D10.CPER.DP1.00041.001.001.501.001.ST_1_minute.2019-05-25.basic.csv')
        os.makedirs(Path(self.data_path, '24'))
        os.makedirs(Path(self.data_path, '25'))
        with open(self.data_file_1, 'w') as f:
            f.write("file 1 content")
        with open(self.data_file_2, 'w') as f:
            f.write("file 2 content")
        self.prefix_index = self.data_path.parts.index("CPER")
        self.prefix_length = 3
        self.sort_index = 10

    def test_package(self):
        os.environ["DATA_PATH"] = str(self.data_path)
        os.environ["OUT_PATH"] = str(self.out_path)
        package(prefix_index=self.prefix_index, prefix_length=self.prefix_length, sort_index=self.sort_index)
        self.check_output()

    def test_main(self):
        os.environ["DATA_PATH"] = str(self.data_path)
        os.environ["OUT_PATH"] = str(self.out_path)
        os.environ["PREFIX_INDEX"] = str(self.prefix_index)
        os.environ["PREFIX_LENGTH"]= str(self.prefix_length)
        os.environ["SORT_INDEX"] = str(self.sort_index)
        packager_main.main()
        self.check_output()

    def check_output(self):
        os.chdir(self.output_path)
        out_files = glob.glob("*.csv")
        self.log.debug("INPUT PATH = " + str(self.input_path))
        self.log.debug("OUTPUT PATH = " + str(self.out_path))
        self.log.debug("NUMBER OF OUTPUT FILES = " + str(len(out_files)))
        self.log.debug("OUTPUT FILES = " + str(out_files))
        basic_pattern = 'NEON.D10.CPER.DP1.00041.001.001.501.001.ST_1_minute.2019-05.basic.*.csv'
        self.assertTrue(len(out_files) == 1)
        self.assertTrue(fnmatch.fnmatch(out_files[0], basic_pattern))

    def tearDown(self):
        self.temp_dir.cleanup()
class TestFixtureLoad:
    def setUp(self):
        self.d = TempDirectory()
        fixtureload.set_source_dir(self.d.path)
        self.d.write('test.json',
                json.dumps(
                    {
                        'description':
                            {
                                'samples': {
                                    'fixture0': {'a': 'b'},
                                    'fixture1': {'c': 'd'}
                                }
                            }
                    }
                ))
        self.d.write('test_corrupted.json', 'corrupted json')

    def tearDown(self):
        self.d.cleanup()

    def test_can_set_fixture_source_directory(self):
        fixtureload.set_source_dir('/tmp')
        assert fixtureload.fixture_source_dir == '/tmp'

    def test_can_load_fixture(self):
        fixture = fixtureload.load('test/description/fixture0')
        compare(fixture, {'a': 'b'})
        fixture = fixtureload.load('test/description/fixture1')
        compare(fixture, {'c': 'd'})

    def test_load_fixture_with_not_existed_file_should_raise(self):
        with ShouldRaise(IOError):
            fixtureload.load('not_existed/description/fixture0')

    def test_load_fixture_with_corrupted_file_should_raise(self):
        with ShouldRaise(ValueError):
            fixtureload.load('test_corrupted/description/fixture0')

    def test_parse_fixture_path(self):
        path = fixtureload.parse_fixture_path('test/desc/fixture0')
        compare(
            path,
            {
                'source_file': 'test.json',
                'fixture_desc': 'desc',
                'fixture': 'fixture0'
            }
        )

    def test_parse_fixture_path_if_path_is_not_valid(self):
        with ShouldRaise(ValueError('Fixture Path is not valid (testfixture0)')):
            fixtureload.parse_fixture_path('testfixture0')
Example #27
class TranslationLoaderTest(TestCase):
    u"""
    Tests the ``TranslationLoader`` template loader. Checks that the loader loads the original
    template only if there is no translated template for the active language. If there is a
    translated template for the active language, the loader loads the translated template. Also
    tests that an exception is raised if there is neither an original nor a translated template.
    """
    def setUp(self):
        self.tempdir = TempDirectory()

        self.settings_override = override_settings(
            LANGUAGES=((u'de', u'Deutsch'), (u'en', u'English'),
                       (u'fr', u'Francais')),
            TEMPLATE_LOADERS=(
                (u'poleno.utils.template.TranslationLoader',
                 u'django.template.loaders.filesystem.Loader'), ),
            TEMPLATE_DIRS=(self.tempdir.path, ),
        )
        self.settings_override.enable()

        self.tempdir.write(u'first.html', u'(first.html)\n')
        self.tempdir.write(u'first.en.html', u'(first.en.html)\n')
        self.tempdir.write(u'second.de.html', u'(second.de.html)\n')

    def tearDown(self):
        self.settings_override.disable()
        self.tempdir.cleanup()

    def test_translated_template_has_priority(self):
        # Existing: first.html, first.en.html
        with translation(u'en'):
            rendered = squeeze(render_to_string(u'first.html'))
            self.assertEqual(rendered, u'(first.en.html)')

    def test_with_only_translated_template(self):
        # Existing: second.de.html
        # Missing: second.html
        with translation(u'de'):
            rendered = squeeze(render_to_string(u'second.html'))
            self.assertEqual(rendered, u'(second.de.html)')

    def test_with_only_untranslated_template(self):
        # Existing: first.html
        # Missing: first.de.html
        with translation(u'de'):
            rendered = squeeze(render_to_string(u'first.html'))
            self.assertEqual(rendered, u'(first.html)')

    def test_missing_template_raises_exception(self):
        # Missing: second.html, second.en.html
        with self.assertRaises(TemplateDoesNotExist):
            render_to_string(u'second.html')
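
The docstring and tests above fix the lookup order: a translated template such as first.en.html is preferred for the active language, and the untranslated first.html is only a fallback. A tiny sketch of just that name-derivation step (the real TranslationLoader is a full Django template loader; this is an assumption-level illustration):

# Hypothetical helper showing the candidate-name order the loader appears to use.
import os

def translated_template_candidates(template_name, language):
    base, ext = os.path.splitext(template_name)    # 'first.html' -> ('first', '.html')
    return ['%s.%s%s' % (base, language, ext),     # 'first.en.html' is tried first
            template_name]                         # then fall back to 'first.html'

# translated_template_candidates(u'first.html', u'en') == [u'first.en.html', u'first.html']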
class Test_OptionParsing_and_Validation(TestCase):
    def setUp(self):
        self.tempdir = TempDirectory()
        self.config_dict = {
        }
        self.config_path = os.path.join(self.tempdir.path, 'config.ini')
        write_config_to_filename(self.config_dict, self.config_path)

    def tearDown(self):
        self.tempdir.cleanup()

    def test_non_destructive_with_sys_argv(self):
        expected_sys_argv = ['', '-c', self.config_path]
        sys.argv = expected_sys_argv[:]
        non_destructive_minimal_parse_and_validate_args()
        self.assertEqual(expected_sys_argv, sys.argv)

    def test_validation_raises_exception_if_config_file_does_not_exist(self):
        def validate():
            parser, options, args = common_parse_args(args=[
                '--config', '/tmp/blah/blah/bbpgsql.ini'])
            common_validate_options_and_args(options, args)
        self.assertRaises(Exception, validate)

    def test_validation_raises_exception_if_config_file_permissions_too_open(
        self):
        with TempDirectory() as d:
            self.parent_dir = d.makedir('parent_dir')
            self.config_path = d.write('parent_dir/config.ini', '')
            self.open_perm = stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO
            os.chmod(self.config_path, self.open_perm)

            def validate(config_path):
                parser, options, args = common_parse_args(args=[
                    '--config', config_path])
                common_validate_options_and_args(options, args)
            self.assertRaises(Exception, validate, self.config_path)

    def test_options_validate_if_config_file_exists(self):
        parser, options, args = common_parse_args(args=[
            '--config', self.config_path])
        self.assertTrue(common_validate_options_and_args(options, args))

    def test_validation_raises_exception_if_cannot_read_config_file(self):
        def validate():
            parser, options, args = common_parse_args(args=[
                '--config', self.config_path])
            self.no_perm = 0
            os.chmod(self.config_path, self.no_perm)
            common_validate_options_and_args(options, args)
        self.assertRaises(Exception, validate)
class TranslationLoaderTest(TestCase):
    u"""
    Tests the ``TranslationLoader`` template loader. Checks that the loader loads the original
    template only if there is no translated template for the active language. If there is a
    translated template for the active language, the loader loads the translated template. Also
    tests that an exception is raised if there is neither an original nor a translated template.
    """

    def setUp(self):
        self.tempdir = TempDirectory()

        self.settings_override = override_settings(
            LANGUAGES=((u'de', u'Deutsch'), (u'en', u'English'), (u'fr', u'Francais')),
            TEMPLATE_LOADERS=((u'poleno.utils.template.TranslationLoader', u'django.template.loaders.filesystem.Loader'),),
            TEMPLATE_DIRS=(self.tempdir.path,),
            )
        self.settings_override.enable()

        self.tempdir.write(u'first.html', u'(first.html)\n')
        self.tempdir.write(u'first.en.html', u'(first.en.html)\n')
        self.tempdir.write(u'second.de.html', u'(second.de.html)\n')

    def tearDown(self):
        self.settings_override.disable()
        self.tempdir.cleanup()


    def test_translated_template_has_priority(self):
        # Existing: first.html, first.en.html
        with translation(u'en'):
            rendered = squeeze(render_to_string(u'first.html'))
            self.assertEqual(rendered, u'(first.en.html)')

    def test_with_only_translated_template(self):
        # Existing: second.de.html
        # Missing: second.html
        with translation(u'de'):
            rendered = squeeze(render_to_string(u'second.html'))
            self.assertEqual(rendered, u'(second.de.html)')

    def test_with_only_untranslated_template(self):
        # Existing: first.html
        # Missing: first.de.html
        with translation(u'de'):
            rendered = squeeze(render_to_string(u'first.html'))
            self.assertEqual(rendered, u'(first.html)')

    def test_missing_template_raises_exception(self):
        # Missing: second.html, second.en.html
        with self.assertRaises(TemplateDoesNotExist):
            render_to_string(u'second.html')
Example #30
class copySpecialTestCase(unittest.TestCase):

    def setUp(self):
        self.inputDirectory = []
        self.specialFilesNamesAbsolutePath = []
        self.specialFileNames =[]
        self.inputDirectoriesPath = []
        for i in range(0,2):
            self.inputDirectory.append(TempDirectory())
            self.inputDirectory[i].write('india__{0}{0}{0}__.txt'.format(i), 'some foo thing')
            self.inputDirectory[i].write('god__{0}{0}{0}__.txt'.format(i), 'some foo thing')
            self.inputDirectory[i].write('hi.txt', 'some foo thing')

            self.specialFilesNamesAbsolutePath.append('{0}/india__{1}{1}{1}__.txt'.format(self.inputDirectory[i].path, i))
            self.specialFilesNamesAbsolutePath.append('{0}/god__{1}{1}{1}__.txt'.format(self.inputDirectory[i].path, i))
            self.specialFileNames.append('india__{0}{0}{0}__.txt'.format(i))
            self.specialFileNames.append('god__{0}{0}{0}__.txt'.format(i))
            self.inputDirectoriesPath.append(self.inputDirectory[i].path)

        self.outputDirectory = TempDirectory()
        self.outputDirectory.path = self.outputDirectory.path

    def test_getSpecialFilesNames(self):
        copySpecialFiles = CopySpecialFiles()
        for path in self.inputDirectoriesPath:
            copySpecialFiles.specialFileNames.extend(copySpecialFiles.getSpecialFilesNames(path))
        self.assertEqual(sorted(self.specialFilesNamesAbsolutePath), sorted(copySpecialFiles.specialFileNames))

    def test_copyToDirectory(self):
        copySpecialFiles = CopySpecialFiles()
        for path in self.inputDirectoriesPath:
            copySpecialFiles.specialFileNames.extend(copySpecialFiles.getSpecialFilesNames(path))
        copySpecialFiles.outputDirectory = self.outputDirectory.path
        copySpecialFiles.copyToDirectory()
        self.assertEqual(sorted(os.listdir(self.outputDirectory.path)), sorted(self.specialFileNames))

    def test_copyToZip(self):
        copySpecialFiles = CopySpecialFiles()
        for path in self.inputDirectoriesPath:
            copySpecialFiles.specialFileNames.extend(copySpecialFiles.getSpecialFilesNames(path))
        copySpecialFiles.zipFile = "dummy.zip"
        copySpecialFiles.copyToZip()
        with zipfile.ZipFile('dummy.zip', 'r') as myzip:
            self.assertEqual(sorted(myzip.namelist()), sorted(self.specialFileNames))
        os.remove("dummy.zip")

    def tearDown(self):
        for inputDirectory in self.inputDirectory:
            inputDirectory.cleanup()
        self.outputDirectory.cleanup()
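
The fixtures above treat files whose names contain a double-underscore token (for example india__000__.txt) as special, while hi.txt is ignored. A rough sketch of the scan under that assumption (the real CopySpecialFiles.getSpecialFilesNames may use a different pattern):

# Hypothetical sketch of the special-file scan exercised above.
import os
import re

SPECIAL_PATTERN = re.compile(r'__(\w+)__')   # assumed marker: '__<token>__' in the file name

def get_special_files_names(directory):
    return [os.path.join(directory, name)
            for name in os.listdir(directory)
            if SPECIAL_PATTERN.search(name)]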
Example #31
class HomeDirTest(unittest.TestCase):
    def setUp(self):
        self.temp_dir = TempDirectory(create=True)
        self.home = PathHomeDir(self.temp_dir.path)

    def tearDown(self):
        self.temp_dir.cleanup()

    def test_read(self):
        self.temp_dir.write("filename", "contents")
        self.assertEquals(self.home.read("filename"), "contents")

    def test_write(self):
        self.temp_dir.write("existing_file", "existing_contents")
        self.home.write("new_file", "contents")
        self.home.write("existing_file", "new_contents")
        self.assertEquals(self.temp_dir.read("existing_file"), "new_contents")
        self.assertEquals(self.temp_dir.read("new_file"), "contents")

    def test_config_file(self):
        with collect_outputs() as outputs:
            self.home.write_config_file("new config")
            self.temp_dir.check(".cosmosrc")
            self.assertEquals(self.home.read_config_file(), "new config")
            self.assertIn("Settings saved", outputs.stdout.getvalue())
            file_mode = os.stat(self.temp_dir.getpath(".cosmosrc")).st_mode
            self.assertEquals(file_mode,
                              stat.S_IFREG | stat.S_IRUSR | stat.S_IWUSR)

    def test_override_config_file(self):
        with collect_outputs():
            other_config = self.temp_dir.write("path/other", "config")
            self.assertEquals(
                self.home.read_config_file(filename_override=other_config),
                "config")

    def test_warn_on_unprotected_config_file(self):
        with collect_outputs() as outputs:
            self.home.write_config_file("new config")
            config_path = self.temp_dir.getpath(".cosmosrc")
            os.chmod(config_path, 0o777)
            self.home.read_config_file()
            assertFunc = (self.assertNotIn
                          if os.name == 'nt' else self.assertIn)
            assertFunc("WARNING", outputs.stderr.getvalue())

    def test_last_cluster(self):
        self.home.write_last_cluster("0000000")
        self.temp_dir.check(".cosmoslast")
        self.assertEquals(self.home.read_last_cluster(), "0000000")
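
test_config_file above pins down two details: the config file is written as .cosmosrc inside the home directory with mode S_IRUSR | S_IWUSR (0o600), and a "Settings saved" notice is printed. Below is a hedged sketch of just that method; the class and method names are assumptions, not the actual PathHomeDir.

# Hypothetical sketch of the write_config_file behaviour asserted above.
import os
import stat

class DemoHomeDir(object):
    def __init__(self, path):
        self._path = path

    def write_config_file(self, contents):
        config_path = os.path.join(self._path, '.cosmosrc')
        with open(config_path, 'w') as f:
            f.write(contents)
        # owner read/write only, as checked via os.stat().st_mode in the test
        os.chmod(config_path, stat.S_IRUSR | stat.S_IWUSR)
        print('Settings saved to %s' % config_path)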
Example #32
 def test_dont_cleanup_with_path(self):
     d = mkdtemp()
     fp = os.path.join(d, 'test')
     with open(fp, 'w') as f:
         f.write('foo')
     try:
         td = TempDirectory(path=d)
         self.assertEqual(d, td.path)
         td.cleanup()
         self.assertEqual(os.listdir(d), ['test'])
         with open(fp) as f:
             self.assertEqual(f.read(), 'foo')
     finally:
         rmtree(d)
class HomeDirTest(unittest.TestCase):

    def setUp(self):
        self.temp_dir = TempDirectory(create=True)
        self.home = PathHomeDir(self.temp_dir.path)

    def tearDown(self):
        self.temp_dir.cleanup()

    def test_read(self):
        self.temp_dir.write("filename", "contents")
        self.assertEquals(self.home.read("filename"), "contents")

    def test_write(self):
        self.temp_dir.write("existing_file", "existing_contents")
        self.home.write("new_file", "contents")
        self.home.write("existing_file", "new_contents")
        self.assertEquals(self.temp_dir.read("existing_file"),
                          "new_contents")
        self.assertEquals(self.temp_dir.read("new_file"), "contents")

    def test_config_file(self):
        with collect_outputs() as outputs:
            self.home.write_config_file("new config")
            self.temp_dir.check(".cosmosrc")
            self.assertEquals(self.home.read_config_file(), "new config")
            self.assertIn("Settings saved", outputs.stdout.getvalue())
            file_mode = os.stat(self.temp_dir.getpath(".cosmosrc")).st_mode
            self.assertEquals(file_mode, stat.S_IFREG | stat.S_IRUSR | stat.S_IWUSR)

    def test_override_config_file(self):
        with collect_outputs():
            other_config = self.temp_dir.write("path/other", "config")
            self.assertEquals(
                self.home.read_config_file(filename_override=other_config),
                "config")

    def test_warn_on_unprotected_config_file(self):
        with collect_outputs() as outputs:
            self.home.write_config_file("new config")
            config_path = self.temp_dir.getpath(".cosmosrc")
            os.chmod(config_path, 0o777)
            self.home.read_config_file()
            assertFunc = (self.assertNotIn if os.name=='nt' else self.assertIn)
            assertFunc("WARNING", outputs.stderr.getvalue())

    def test_last_cluster(self):
        self.home.write_last_cluster("0000000")
        self.temp_dir.check(".cosmoslast")
        self.assertEquals(self.home.read_last_cluster(), "0000000")
class Test_FilesystemCommitStorage(TestCase):
    def setUp(self):
        self.tempdir = TempDirectory()
        self.commit_storage_path = self.tempdir.makedir('commit_storage')
        self.config = config()
        self.config.set('WAL', 'driver', 'filesystem')
        self.config.set('WAL', 'path', self.commit_storage_path)

    def tearDown(self):
        self.tempdir.cleanup()

    def test_will_build_storage_from_config(self):
        self.assertEqual(FilesystemCommitStorage,
            type(get_repository_storage_from_config(self.config, 'WAL')))
Example #35
class TestNumpyArrayReader(TestCase):
    input_array = [1, 2, 3, 4]

    def setUp(self):
        self.d = TempDirectory()
        NumpyArrayWriter.save_data(self.d.path + '\\test.npy',
                                   self.input_array)
        self.reader = NumpyArrayReader(self.d.path + '\\test.npy')

    def test_resource_should_close_it_self(self):
        self.assertTrue(self.reader.readable())

    def test_close(self):
        self.reader.close()

        self.assertFalse(self.reader.readable(), 'file is not closed')

    def test_read(self):
        data = self.reader.load_data()

        npt.assert_array_equal([self.input_array], data)

    def test_read_multiple_lines(self):
        second_input = [123, 4, 5, 6]
        NumpyArrayWriter.save_data(self.d.path + '\\test.npy', second_input)

        data = self.reader.load_data()

        npt.assert_array_equal([self.input_array, second_input], data)

    def test_read_one_line_multiple_times(self):
        second_input = [123, 4, 5, 6]
        NumpyArrayWriter.save_data(self.d.path + '\\test.npy', second_input)

        data = self.reader.load_data(nr_of_lines=1)

        npt.assert_array_equal([self.input_array], data)

        data = self.reader.load_data(nr_of_lines=1)

        npt.assert_array_equal([second_input], data)

        data = self.reader.load_data(nr_of_lines=1)

        self.assertIsNone(data)

    def tearDown(self):
        self.reader.__del__()
        self.d.cleanup()
 def test_dont_cleanup_with_path(self):
     d = mkdtemp()
     fp = os.path.join(d, 'test')
     with open(fp, 'w') as f:
         f.write('foo')
     try:
         td = TempDirectory(path=d)
         self.assertEqual(d, td.path)
         td.cleanup()
         # checks
         self.assertEqual(os.listdir(d), ['test'])
         with open(fp) as f:
             self.assertEqual(f.read(), 'foo')
     finally:
         rmtree(d)
 def test_dont_cleanup_with_path(self):
     d = mkdtemp()
     fp = os.path.join(d, "test")
     with open(fp, "w") as f:
         f.write("foo")
     try:
         td = TempDirectory(path=d)
         self.assertEqual(d, td.path)
         td.cleanup()
         # checks
         self.assertEqual(os.listdir(d), ["test"])
         with open(fp) as f:
             self.assertEqual(f.read(), "foo")
     finally:
         rmtree(d)
Example #38
 def test_atexit(self):
     from mock import call
     m = Mock()
     with Replacer() as r:
         r.replace('testfixtures.TempDirectory.atexit_setup', False)
         r.replace('atexit.register', m.register)
         d = TempDirectory()
         expected = [call.register(d.atexit)]
         compare(expected, m.mock_calls)
         with catch_warnings(record=True) as w:
             d.atexit()
             self.assertEqual(len(w), 1)
             compare(str(w[0].message), 'TempDirectory instances not cleaned up by shutdown:\n' + d.path)
         d.cleanup()
         compare(set(), TempDirectory.instances)
         d.atexit()
Example #39
0
class TestSource(TestCase):

    def setUp(self):
        self.dir = TempDirectory()
        self.tb = TestingBase()

    def tearDown(self):
        self.tb.restore()
        self.dir.cleanup()
    
    def test_table(self):
        metadata = MetaData()
        mytable = Table('user', metadata,
                        Column('id', Integer, primary_key=True),
                        Column('name', String(40)),
                        )
        s = Source(mytable)

        # check we have the right tables
        compare(['user'], s.metadata.tables.keys())
        # check we have a new metadata object
        self.failIf(metadata is s.metadata)
        # check we have a copy of the table
        self.failIf(mytable is s.metadata.tables['user'])

    def test_class(self):

        class SomethingElse:
            pass

        if PY2:
            text = (
                "<class mortar_rdb.tests.test_controlled_schema."
                "SomethingElse at [0-9a-zA-Z]+>"
            )
        else:
            text = (
                "<class 'mortar_rdb.tests.test_controlled_schema."
                "TestSource.test_class.<locals>.SomethingElse'>"
            )

        with ShouldRaise(TypeError(S(
                    text+" must be a "
                    "Table object or a declaratively mapped model class."
                    ))):
            s = Source(SomethingElse)
Example #40
0
class TestingDB:
    def __init__(self, filename):
        self._dir_obj = TempDirectory()
        self.dirpath = self._dir_obj.path
        shutil.copy(
            os.path.join(os.getcwd(), "tests", "samples", "databases",
                         filename), os.path.join(self.dirpath, filename))
        self.path = os.path.join(self.dirpath, filename)

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        try:
            self._dir_obj.cleanup()
        except Exception as e:
            pass
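Typical usage of TestingDB is as a context manager; a hedged sketch (the sample filename below is hypothetical, standing in for one of the files under tests/samples/databases):

import os

with TestingDB('example.db') as db:
    # db.path is a throw-away copy inside a TempDirectory, safe to mutate;
    # the directory is cleaned up when the with-block exits
    assert os.path.exists(db.path)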
class Cmdline_test_skeleton(TestCase):
    __test__ = False  # to prevent nose from running this skeleton
    ARCHIVEPGSQL_PATH = os.path.join('bbpgsql', 'cmdline_scripts')
    CONFIG_FILE = 'config.ini'
    ONE_MEBIBYTE = 1024. * 1024.
    ONE_MEGABYTE = 1000. * 1000.

    exe_script = 'fill this in with your command'

    def setUp(self):
        self.setup_environment_and_paths()
        self.write_config_to_disk()
        self.cmd = [self.exe_script, '--config', self.config_path]
        self.setup_customize()

    def setup_customize(self):
        raise Exception('This is a skeleton for test - you need to provide '
                        'your own customization function, setup_customize')

    def setup_environment_and_paths(self):
        self.env = deepcopy(os.environ)
        self.env['PATH'] = ''.join([
            self.env['PATH'],
            ':',
            self.ARCHIVEPGSQL_PATH])
        self.tempdir = TempDirectory()
        self.setup_environment_and_paths_customize()

    def setup_environment_and_paths_customize(self):
        '''This is an optional function'''
        pass

    def write_config_to_disk(self):
        self.config_path = os.path.join(self.tempdir.path, self.CONFIG_FILE)
        write_config_to_filename(self.setup_config(), self.config_path)
        self.config = get_config_from_filename_and_set_up_logging(
            self.config_path
        )

    def tearDown(self):
        self.tempdir.cleanup()
        self.teardown_customize()

    def teardown_customize(self):
        raise Exception('This is a skeleton for test - you need to provide '
                        'your own customization function, teardown_customize')
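A concrete test case would subclass the skeleton, re-enable collection, and supply the hooks it requires (setup_config, setup_customize, teardown_customize); test methods would then drive self.cmd in self.env. A minimal sketch, with the command name and config contents assumed for illustration:

class Test_archivewal_cmdline(Cmdline_test_skeleton):
    __test__ = True            # re-enable collection for the concrete subclass
    exe_script = 'archivewal'  # assumed command under test

    def setup_config(self):
        # returned dict is written to disk by write_config_to_disk();
        # contents are illustrative only
        return {
            'General': {
                'pgsql_data_directory': self.tempdir.path,
            },
        }

    def setup_customize(self):
        pass  # no extra fixtures needed in this sketch

    def teardown_customize(self):
        pass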
class Test_restorewal_requires_WAL_file_and_destination(TestCase):
    def setUp(self):
        self.tempdir = TempDirectory()
        self.config_dict = {
            'General': {
                'pgsql_data_directory': self.tempdir.path,
            },
        }
        self.config_file = os.path.join(self.tempdir.path, 'config_file')
        write_config_to_filename(self.config_dict, self.config_file)
        parser, self.options, self.args = restorewal_parse_args(['-c',
            self.config_file])

    def tearDown(self):
        self.tempdir.cleanup()

    def test_will_raise_exception_if_not_exactly_two_args(self):
        self.assertRaises(
            Exception,
            restorewal_validate_options_and_args,
            self.options,
            []
            )
        self.assertRaises(
            Exception,
            restorewal_validate_options_and_args,
            self.options,
            ['one']
            )
        self.assertRaises(
            Exception,
            restorewal_validate_options_and_args,
            self.options,
            ['one', 'two', 'three']
            )

    def test_is_explicit_about_error(self):
        try:
            restorewal_validate_options_and_args(
                self.options,
                []
                )
        except Exception as e:
            print('Exception', e)
            self.assertTrue('name of the WAL file to retrieve' in str(e))
        else:
            # the original listing truncates after "else:"; failing here is an
            # assumed completion
            self.fail('expected restorewal_validate_options_and_args to raise')
class Test_storagestats_rejects_arguments(TestCase):
    def setUp(self):
        self.tempdir = TempDirectory()
        self.config_dict = {
        }
        self.config_path = os.path.join(self.tempdir.path, 'config.ini')
        write_config_to_filename(self.config_dict, self.config_path)

    def tearDown(self):
        self.tempdir.cleanup()

    def test_storagestats_rejects_any_arguments(self):
        def validate():
            parser, options, args = storagestats_parse_args(args=[
                '--config', self.config_path, 'some_argument'])
            storagestats_validate_options_and_args(options, args)
        self.assertRaises(TooManyArgumentsException, validate)
Example #44
0
class test_fdmnes(unittest.TestCase):

    def setUp(self):
        self.dir = TempDirectory()
        self.url = "http://www.crystallography.net/cod/1000058.cif"
        self.element = "Ti"
        self.edge = "K"
        self.fermilevel = 4.9664
        self.MM = 47.867
        self.ciffile = os.path.join(self.dir.path,"rutile.cif")
        self.inputfile = os.path.join(self.dir.path,"rutile.txt")

    def tearDown(self):
        self.dir.cleanup()

    def test_simulate(self):
        
        urllib.urlretrieve(self.url, self.ciffile)
        sim = fdmnes(self.ciffile, resonant=self.element)

        # Settings
        sim.P.Energpho = False # relative energies as output
        sim.P.Radius = 3.5 # Radius of the cluster for calculation
        sim.P.Rpotmax = sim.P.Radius + 5 # Radius of the cluster for potential calculation
        sim.P.Quadrupole = True # multipole approximation
        sim.P.Green = False # MS instead of FDM (faster but less accurate)
        sim.P.TDDFT = True # multi electron correction
        sim.P.Convolution = True # 
        sim.P.Density = False # save density of states
        sim.P.Edge = self.edge
        sim.P.Range = (-10,2,100) # eV

        sim.WriteInputFile(self.inputfile, overwrite=True)

        sim.Run(wait=False)
        while True:
            NumRunning = len(list(sim.jobs.get_running()))
            if NumRunning == 0:
                break
            time.sleep(5)

        data = sim.get_XANES(conv = True)

        energy = data[:,0]/1000. + self.fermilevel # eV -> keV
        mu = data[:,1]*6.022140857e5/self.MM # Absorption cross section (Mbarn/atom) -> mass absorption coefficient (cm^2/g)
Example #45
0
class test_next_output_stem(unittest.TestCase):
    """tests for next_output_stem"""
    #next_output_stem(prefix, path=None, start=1, inc=1, zfill=3,
    #       overwrite=False)

    def setUp(self):
        self.tempdir = TempDirectory()
        self.tempdir.write('a_004', b'some text a4')
        self.tempdir.write('a_005', b'some text a5')
        self.tempdir.write('b_002.txt', b'some text b2')
        self.tempdir.write('b_008.out', b'some text b8')
        self.tempdir.write(('c_010', 'por'), b'some text c5por')

    def tearDown(self):
        self.tempdir.cleanup()

#    @with_setup(setup=self.setup, teardown=self.teardown)
    def test_file(self):
        assert_equal(next_output_stem(prefix='a_', path=self.tempdir.path),
                     'a_006')
    def test_file2(self):
        assert_equal(next_output_stem(prefix='b_', path=self.tempdir.path),
                     'b_009')
    def test_directory(self):
        assert_equal(next_output_stem(prefix='c_', path=self.tempdir.path),
                     'c_011')
    def test_file_overwrite(self):
        assert_equal(next_output_stem(prefix='a_', path=self.tempdir.path,
                                      overwrite=True),
                     'a_005')
    def test_inc(self):
        assert_equal(next_output_stem(prefix='a_', path=self.tempdir.path,
                                      inc=3),
                     'a_008')
    def test_zfill(self):
        assert_equal(next_output_stem(prefix='a_', path=self.tempdir.path,
                                      zfill=5),
                     'a_00006')
    def test_does_not_exist(self):
        assert_equal(next_output_stem(prefix='g_', path=self.tempdir.path),
                     'g_001')
    def test_does_not_exist_with_start(self):
        assert_equal(next_output_stem(prefix='g_', path=self.tempdir.path,
                                      start=4),
                     'g_004')
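The implementation of next_output_stem is not included in this listing; one sketch consistent with the tests above (scan the directory for entries whose stem starts with the prefix, take the highest trailing number, and step it by inc unless overwrite is requested) could be:

import os
import re


def next_output_stem(prefix, path=None, start=1, inc=1, zfill=3,
                     overwrite=False):
    # find the highest existing '<prefix><number>' stem (extensions ignored)
    path = path or os.getcwd()
    pattern = re.compile(re.escape(prefix) + r'(\d+)$')
    highest = None
    for entry in os.listdir(path):
        stem = os.path.splitext(entry)[0]
        match = pattern.match(stem)
        if match:
            number = int(match.group(1))
            if highest is None or number > highest:
                highest = number
    if highest is None:
        number = start
    elif overwrite:
        number = highest
    else:
        number = highest + inc
    return prefix + str(number).zfill(zfill)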
def test_statistics():
    dir = TempDirectory()
    original_dir = os.getcwd()
    with open('tests/notebooks/a.ipynb', 'rb') as input:
        dir.write('a.ipynb', input.read())
        dir.write('__main__.py', b'')
        res = {
            'cells': 25,
            'code': 12,
            'ename': u'KeyError',
            'evalue': u"'engines'",
            'executed': 12,
            'markdown': 13
        }
        os.chdir(dir.path)
        assert (notebook.statistics('a.ipynb') == res)
    os.chdir(original_dir)
    dir.cleanup()
Example #47
0
class test_working_directory(unittest.TestCase):
    """tests working_direcroty"""
    def setUp(self):
        self.tempdir = TempDirectory()
        self.original_dir = os.getcwd()

    def tearDown(self):
        os.chdir(self.original_dir)
        self.tempdir.cleanup()

    def test_directory_change(self):

        assert_equal(os.getcwd(), self.original_dir)

        with working_directory(self.tempdir.path):
            assert_equal(os.getcwd(), self.tempdir.path)

        assert_equal(os.getcwd(), self.original_dir)
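working_directory itself is not shown here; a minimal sketch that would satisfy this test, assuming it is a plain context manager around os.chdir:

import os
from contextlib import contextmanager


@contextmanager
def working_directory(path):
    # temporarily change the current working directory, restoring it on exit
    original = os.getcwd()
    os.chdir(path)
    try:
        yield path
    finally:
        os.chdir(original)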
Example #48
0
class ModelBuilderTestCase(TestCase):
    def setUp(self):
        # Seed the database from the facescrub index and available images
        seed(max_faces=50)
        # save current working dir
        self.def_dir = os.getcwd()
        # change curret directory to a temp dir
        self.dir = TempDirectory()
        os.chdir(self.dir.path)
        # by changing the current directory we change
        # the location where the .dat files will be created

    def tearDown(self):
        # change current directory back to original
        os.chdir(self.def_dir)
        # delete temp directory
        self.dir.cleanup()

    def test_seed(self):
        self.assertTrue(Face.objects.all().count() > 0)
        self.assertTrue(Actor.objects.all().count() > 0)
        self.assertTrue(User.objects.all().count() > 0)

    def find_similars(self):
        # build or rebuild the datasets
        build_datasets(method_threshold=1000000)
        # find the demo user and get its first face
        user = User.objects.filter(username='******').first()
        face = user.face_set.first()
        # create a new history item
        history = History.objects.create(user=user, in_face=face)
        history.save()
        # call find similars
        r = find_similars.delay(history.id)
        # wait for it to finish
        while not r.ready():
            time.sleep(1)
        # state must be success
        self.assertTrue(not r.failed())
        history_after = History.objects.get(pk=int(r.result))
        # both history items must be the same
        self.assertTrue(history_after.id == history.id)
        # must have found at least one similar face
        self.assertTrue(len(list(history_after.historyitem_set.all())) > 0)
class Test_command_dispatch(TestCase):
    archivepgsql_exe = 'archivepgsql'
    archivewal_exe = 'archivewal'
    bbpgsql_exe = 'bbpgsql'

    def setUp(self):
        self.tempdir = TempDirectory()
        self.config_dict = {
        }
        self.config_path = os.path.join(self.tempdir.path, 'config.ini')
        write_config_to_filename(self.config_dict, self.config_path)

    def tearDown(self):
        self.tempdir.cleanup()

    def get_argv_for_cmd(self, cmd):
        return [cmd, '-c', self.config_path]

    def test_dispatch_calls_archivepgsql_main(self,
        mock_restorewal_main,
        mock_archivepgsql_main, mock_archivewal_main, mock_bbpgsql_error):
        bbpgsql_main(self.get_argv_for_cmd(self.archivepgsql_exe))
        mock_archivepgsql_main.assert_called_once_with()
        self.assertFalse(mock_archivewal_main.called)
        self.assertFalse(mock_restorewal_main.called)
        self.assertFalse(mock_bbpgsql_error.called)

    def test_dispatch_calls_only_archivewal_main(self,
        mock_restorewal_main,
        mock_archivepgsql_main, mock_archivewal_main, mock_bbpgsql_error):
        bbpgsql_main(self.get_argv_for_cmd(self.archivewal_exe))
        mock_archivewal_main.assert_called_once_with()
        self.assertFalse(mock_archivepgsql_main.called)
        self.assertFalse(mock_restorewal_main.called)
        self.assertFalse(mock_bbpgsql_error.called)

    def test_dispatch_calls_only_bbpgsql_error(self,
        mock_restorewal_main,
        mock_archivepgsql_main, mock_archivewal_main, mock_bbpgsql_error):
        bbpgsql_main(self.get_argv_for_cmd(self.bbpgsql_exe))
        mock_bbpgsql_error.assert_called_once_with()
        self.assertFalse(mock_archivepgsql_main.called)
        self.assertFalse(mock_archivewal_main.called)
        self.assertFalse(mock_restorewal_main.called)
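The mock_* parameters in Test_command_dispatch imply @patch decorators that did not survive in this listing. A hedged guess at how one of the tests might have been decorated (the patch targets under bbpgsql.bbpgsql_main are assumptions; mock.patch applies bottom-up, which matches the parameter order used above):

    @patch('bbpgsql.bbpgsql_main.bbpgsql_error')
    @patch('bbpgsql.bbpgsql_main.archivewal_main')
    @patch('bbpgsql.bbpgsql_main.archivepgsql_main')
    @patch('bbpgsql.bbpgsql_main.restorewal_main')
    def test_dispatch_calls_archivepgsql_main(self,
        mock_restorewal_main,
        mock_archivepgsql_main, mock_archivewal_main, mock_bbpgsql_error):
        ...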
Example #50
0
class Test_archivepgsql_BasicCommandLineOperation(TestCase):
    ARCHIVEPGSQL_PATH = os.path.join('bbpgsql', 'cmdline_scripts')
    CONFIG_FILE = 'config.ini'
    exe_script = 'archivepgsql'

    def setUp(self):
        self.setup_environment()
        self.setup_config()
        self.cmd = [self.exe_script, '--dry-run', '--config', self.config_path]

    def setup_environment(self):
        self.env = deepcopy(os.environ)
        self.env['PATH'] = ''.join([
            self.env['PATH'],
            ':',
            self.ARCHIVEPGSQL_PATH])
        self.tempdir = TempDirectory()
        self.data_dir = self.tempdir.makedir('pgsql_data')

    def setup_config(self):
        self.config_path = os.path.join(self.tempdir.path, self.CONFIG_FILE)
        self.config_dict = {
            'General': {
                'pgsql_data_directory': self.data_dir,
            },
            'Snapshot': {
                'driver': 'memory',
            },
        }
        write_config_to_filename(self.config_dict, self.config_path)
        self.config = get_config_from_filename_and_set_up_logging(
            self.config_path
        )

    def tearDown(self):
        self.tempdir.cleanup()

    def test_can_execute_archivepgsql(self):
        check_call(self.cmd, env=self.env, stdout=PIPE)

    def test_obeys_dry_run_option(self):
        proc = Popen(self.cmd, env=self.env, stdout=PIPE)
        stdoutdata, stderrdata = proc.communicate()
        self.assertEqual("Dry Run\n", stdoutdata)
Example #51
0
class ImportExportTest(unittest.TestCase):
    referenceMapPath = ""
    referenceData = ""
    directory = None

    def readFile(self, filePath):
        with open(filePath) as f:
            return f.read()

    def copyMaterials(self, filepath):
        command = "grep -E 'materials|material |map>' '" + self.referenceMapPath + "' > '" + filepath + "'"
        process = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE)
        process.wait()
        process.communicate()

    def setUp(self):
        print(self.id())
        self.directory = TempDirectory()

    def tearDown(self):
        self.referenceMapPath = ""
        self.directory.cleanup()

    def testImportExport(self):
        self.referenceMapPath = "data/maps/importExportTest.xml"
        self.referenceData = self.readFile(self.referenceMapPath)

        importer = RadixMapEditor.importer.Importer(self.referenceMapPath,
                                                    True)
        importer.execute(bpy.context)

        filepath = os.path.join(self.directory.path, "importExportTest.xml")
        self.copyMaterials(filepath)

        exporter = RadixMapEditor.Exporter.Exporter(filepath)
        exporter.execute(bpy.context)

        testData = self.readFile(filepath)

        self.assertEqual(testData, self.referenceData,
                         "Files are not equal.\n")
Example #52
0
class test_working_directory(unittest.TestCase):
    """tests working_direcroty"""

    def setUp(self):
        self.tempdir = TempDirectory()
        self.original_dir = os.getcwd()

    def tearDown(self):
        os.chdir(self.original_dir)
        self.tempdir.cleanup()

    def test_directory_change(self):

        assert_equal(os.getcwd(), self.original_dir)

        with working_directory(self.tempdir.path):
            assert_equal(os.getcwd(), self.tempdir.path)

        assert_equal(os.getcwd(), self.original_dir)
Example #53
0
class ActionHandlerTest(unittest.TestCase):
    def setUp(self):
        self.event = Mock()
        self.event.body = {}
        self.event.headers = {}
        self.context = Mock()

        self.temp_dir = TempDirectory()

        os.environ['appname'] = 'appname'
        os.environ['secrets'] = self.temp_dir.path

    def tearDown(self):
        self.temp_dir.cleanup()

    def test_secret(self):
        self.temp_dir.write('test-secret', b'top-secret-info')
        secret = base.secret('test-secret')
        self.assertEqual('top-secret-info', secret)
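base.secret is not included in this listing; a minimal sketch consistent with the test above, assuming secrets are stored one file per secret in the directory named by the 'secrets' environment variable:

import os


def secret(name):
    # read the named secret from the directory pointed to by $secrets
    with open(os.path.join(os.environ['secrets'], name)) as f:
        return f.read()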
class TestSource(TestCase):
    def setUp(self):
        self.dir = TempDirectory()
        self.tb = TestingBase()

    def tearDown(self):
        self.tb.restore()
        self.dir.cleanup()

    def test_table(self):
        metadata = MetaData()
        mytable = Table(
            'user',
            metadata,
            Column('id', Integer, primary_key=True),
            Column('name', String(40)),
        )
        s = Source(mytable)

        # check we have the right tables
        compare(['user'], s.metadata.tables.keys())
        # check we have a new metadata object
        self.assertFalse(metadata is s.metadata)
        # check we have a copy of the table
        self.assertFalse(mytable is s.metadata.tables['user'])

    def test_class(self):
        class SomethingElse:
            pass

        if PY2:
            text = ("<class mortar_rdb.tests.test_controlled_schema."
                    "SomethingElse at [0-9a-zA-Z]+>")
        else:
            text = ("<class 'mortar_rdb.tests.test_controlled_schema."
                    "TestSource.test_class.<locals>.SomethingElse'>")

        with ShouldRaise(
                TypeError(
                    S(text + " must be a "
                      "Table object or a declaratively mapped model class."))):
            s = Source(SomethingElse)
Example #55
0
class PackageTest(TestCase):
    def setUp(self):
        self.dir = TempDirectory(ignore=['pyc$', 'pyo$'])
        self.added_to_sys = []
        # now add the path to sys.path
        sys.path.append(self.dir.path)
        # keep a set of modules, so we can delete any that get added
        self.modules = set(sys.modules)
        # create a handy Replacer
        self.r = Replacer()
        # make a package
        self.dir.write('x/__init__.py', b'')
        self.dir.write('x/y/__init__.py', b'')
        self.dir.write('x/y/z/__init__.py', b'')

    def tearDown(self):
        self.r.restore()
        sys.path.remove(self.dir.path)
        for name in set(sys.modules) - self.modules:
            del sys.modules[name]
        self.dir.cleanup()
Example #56
0
class TestHelpers(object):
    def setUp(self):
        self.dir = TempDirectory()
        self.missing = self.dir.getpath('missing')
        self.path = search_path()

    def tearDown(self):
        self.dir.cleanup()

    def run_main(self, args=(), output='', return_code=0):
        # so we don't leave log handlers lying around...
        # ...level is so that we check the log level is correctly set
        # in setup_logging
        with LogCapture(level=100):
            with Replacer() as r:
                # set argv
                argv = ['x'] + list(args)
                r.replace('sys.argv', argv)
                r.replace('picky.main.datetime',
                          test_datetime(2001, 1, 2, 3, 4, 5))
                # set PATH env variable
                r.replace('os.environ.PATH', self.path)
                # change to tempdir
                cwd = os.getcwd()
                try:
                    os.chdir(self.dir.path)
                    # get the exit code
                    with ShouldRaise(SystemExit) as s:
                        # capture output
                        with OutputCapture() as actual:
                            main()
                finally:
                    os.chdir(cwd)

        # compare output, with timestamp subbed out
        captured = re.sub(r'[\d\- :]{19}', '(ts)', actual.captured)
        compare(output, captured)

        # compare return code
        compare(return_code, s.raised.code)