Example #1
0
def test_importing_bad_file():
    """Discovering a module whose import fails must raise ImportError."""
    with Patcher() as fake:
        # A module whose only statement is an import that cannot resolve.
        fake.fs.create_file(
            "some_bad_test.py",
            contents="from lie import non_existing",
        )

        with pytest.raises(ImportError):
            discover_tests_under_paths(["some_bad_test.py"])
    def setUp(self):
        """Activate the fake filesystem and build the mock app registry."""
        self.patcher = Patcher()
        self.patcher.setUp()  # called in the initialization code

        # Template files for the project package and its two apps.
        for template_path in (
                '/tmp/project/project/templates/abc.html',
                '/tmp/project/my_app/templates/my_app/def.html',
                '/tmp/project/your_app/templates/your_app/def.html'):
            self.patcher.fs.CreateFile(template_path)

        app_configs = collections.OrderedDict([
            ('project', AppConfig('/tmp/project/project/')),
            ('my_app', AppConfig('/tmp/project/my_app/')),
            ('your_app', AppConfig('/tmp/project/your_app/')),
        ])
        self.mock_apps = Apps(app_configs)
Example #3
0
 def test_start_flushing4(self):
     """With flushing already in progress, start_flushing records the session
     but does not replace the observer's logs.

     NOTE(review): the ``yield`` suggests an inlineCallbacks-style async
     test runner drives this generator -- confirm against the test base class.
     """
     with Patcher() as _:
         self.observer.session = self.session
         self.observer.sleep = mock.MagicMock()
         # Simulate a flush that is already running.
         self.observer.flushing = True
         yield self.observer.start_flushing(self.session)
         self.assertNotEqual(self.observer.logs, 'sdfwef')
         self.assertEqual(self.observer.sessions, [self.session])
Example #4
0
def test_build_function_errors():
    """Ensure fuzzer build failures are detectable.

    Each fuzzer module's build() is invoked under conditions where it must
    fail; the test passes only if every call raises an exception.
    """
    for module in _get_all_fuzzer_modules():
        # pytest.raises stays outermost so any exception from the fake
        # filesystem teardown is also surfaced, matching the original.
        with pytest.raises(Exception), Patcher():
            module.build()
Example #5
0
 def test_fail(self):
     """check_config must raise InvalidConfig for unparsable file contents."""
     config_path = 'amazon-dash.yml'
     with Patcher() as patcher:
         patcher.fs.CreateFile(config_path, contents='invalid config')
         # Root-owned, owner-only permissions (0600).
         os.chown(config_path, 0, 0)
         os.chmod(config_path, 0o600)
         with self.assertRaises(InvalidConfig):
             check_config(config_path)
Example #6
0
    def test_start_flushing3(self):
        """After flushing with an existing recovery file, observer.logs must
        not equal that file's raw contents ('sdfwef').

        NOTE(review): yield-style test -- assumes an inlineCallbacks-like
        runner drives this generator.
        """
        with Patcher() as patcher:
            # Pre-create the recovery file with known contents.
            patcher.fs.CreateFile(self.observer.recovery_file(self.session), contents='sdfwef')

            self.observer.session = self.session
            self.observer.sleep = mock.MagicMock()
            yield self.observer.start_flushing(self.session)
            self.assertNotEqual(self.observer.logs, 'sdfwef')
Example #7
0
 def test_yaml_exception(self):
     """Unparsable YAML (a NUL byte) in the config must raise InvalidConfig."""
     config_path = 'config.yml'
     with Patcher() as patcher:
         patcher.fs.CreateFile(config_path, contents='\x00')
         os.chown(config_path, 0, 0)
         os.chmod(config_path, 0o660)
         with self.assertRaises(InvalidConfig):
             # Use the path variable instead of repeating the literal
             # (the original passed 'config.yml' again here).
             Config(config_path)
Example #8
0
 def test_invalid_config(self, getuid_mock):
     """Config must raise InvalidConfig for non-YAML file contents."""
     config_path = 'amazon-dash.yml'
     with Patcher() as patcher:
         patcher.fs.CreateFile(config_path, contents='invalid config')
         # Owned by uid/gid 1000 with 0660 permissions.
         os.chown(config_path, 1000, 1000)
         os.chmod(config_path, 0o660)
         with self.assertRaises(InvalidConfig):
             Config(config_path)
Example #9
0
 def test_root(self, getuid_mock):
     """A root-owned config file with 0o660 permissions loads without error."""
     config_path = 'amazon-dash.yml'
     with Patcher() as patcher:
         patcher.fs.CreateFile(config_path, contents=config_data)
         os.chown(config_path, 0, 0)
         os.chmod(config_path, 0o660)
         Config(config_path)
     # NOTE: the Patcher context manager already calls tearDown() on exit;
     # the original's extra patcher.tearDown() after the block was redundant.
Example #10
0
    def test_validate_settings(self):
        """A schema requiring 'test' must reject a session that omits it."""
        class TestSession(CommonSession):
            pass

        with Patcher() as patcher:
            schema_path = os.path.join(
                self.session.component_schemas_path(), 'settings.json')
            patcher.fs.CreateFile(
                schema_path,
                contents='{"type": "object", "properties": {"test": {"type": "string"}}, "required": ["test"]}')
            self.assertRaisesRegex(
                ValidationError, '\'test\' is a required property', TestSession)
    def test_upload_csv_to_s3_success_returns_correctly(
            self, airflow_context, bucket_names, home_directory_res):
        """Uploading an existing csv to a valid bucket returns True.

        The boto3 client/resource are mocked so no network calls occur;
        pyfakefs supplies the csv/news directories the function expects.
        """

        # Arrange
        # get the current pipeline info
        pipeline_name = airflow_context['dag'].dag_id
        bucket_name = bucket_names[0]

        # setup a Mock of the boto3 resources and file upload functions
        upload_client = MagicMock(spec=boto3.client('s3'))
        resource_client = MagicMock(spec=boto3.resource('s3'))
        upload_client.upload_file.side_effect = lambda fname, bname, key: None
        resource_client.buckets.all.side_effect = lambda: bucket_names

        # paths to the fake news and csv directories the function under test
        # uses
        csv_dir = os.path.join(home_directory_res, 'tempdata', pipeline_name,
                               'csv')
        news_dir = os.path.join(home_directory_res, 'tempdata', pipeline_name,
                                'news')

        # dummy csv file that will be uploaded by the function
        full_file_path = os.path.join(csv_dir, 'stuff.csv')

        # The Patcher context manager activates the fake filesystem on entry
        # and restores the real one on exit; the original's explicit
        # patcher.setUp()/patcher.tearDown() calls inside the block were
        # redundant double-patching and have been removed.
        with Patcher() as patcher:
            # create fake directories and place the dummy csv file there
            patcher.fs.create_dir(csv_dir)
            patcher.fs.create_dir(news_dir)
            patcher.fs.create_file(full_file_path, contents='1,dummy,txt')

            # Act
            # attempt uploading a file to a valid s3 bucket
            result = c.UploadOperations.upload_csv_to_s3(
                csv_dir, bucket_name, upload_client, resource_client,
                **airflow_context)

        # Assert
        # a successful upload reports True
        assert result is True
Example #12
0
def fs(request):
    """Fake filesystem fixture; Patcher args come from optional parametrize."""
    # An absent request.param means no extra Patcher arguments.
    extra_args = request.param if hasattr(request, 'param') else ()
    patcher = Patcher(*extra_args)
    patcher.setUp()
    yield patcher.fs
    patcher.tearDown()
Example #13
0
class GenerateUploadFileIterTest(TestCase):
    """Tests for generate_upload_file_iter against a pyfakefs filesystem."""

    def setUp(self):
        """Populate the fake filesystem with every known test file."""
        self.patcher = Patcher()
        self.patcher.setUp()
        for path in ALL_FILES:
            self.patcher.fs.create_file(path, contents='test')

    def tearDown(self):
        self.patcher.tearDown()

    def upload_files_to_path_set(self, file_iter):
        """Collect the .path of each yielded upload file into a set."""
        return {upload_file.path for upload_file in file_iter}

    def test_iter_dir(self):
        """Recursive iteration yields the regular (non-hidden) files."""
        paths = self.upload_files_to_path_set(
            generate_upload_file_iter(['/dummy'], recursive=True))
        self.assertSetEqual(paths, REGULAR_FILE_SET)

    def test_iter_dir_hidden(self):
        """Disabling hidden-file filtering yields every file."""
        paths = self.upload_files_to_path_set(
            generate_upload_file_iter(['/dummy'],
                                      recursive=True,
                                      ignore_hidden_files=False))
        self.assertSetEqual(paths, ALL_FILES)

    def test_iter_file(self):
        """A single file path yields exactly that file."""
        target = '/dummy/file1.txt'
        paths = self.upload_files_to_path_set(
            generate_upload_file_iter([target]))
        self.assertSetEqual(paths, {target})

    def test_iter_dir_with_reject(self):
        """A directory without recursive=True is rejected."""
        dir_path = '/dummy'
        file_iter = generate_upload_file_iter([dir_path], recursive=False)
        with self.assertRaises(InvalidPathException) as context:
            list(file_iter)
        assert context.exception.path == dir_path

    def test_iter_dir_with_invalid_path(self):
        """A nonexistent path raises InvalidPathException carrying the path."""
        invalid_path = '/invalid'
        file_iter = generate_upload_file_iter([invalid_path])
        with self.assertRaises(InvalidPathException) as context:
            list(file_iter)
        assert context.exception.path == invalid_path
Example #14
0
def source_fs(request):
    """Build a fake source tree for packaging tests and return the fake fs."""
    patcher = Patcher()
    patcher.setUp()
    request.addfinalizer(patcher.tearDown)

    # Regular files of the fake project tree.
    for path in (
            '/home/foo/src/bar-project/config.json',
            '/home/foo/src/bar-project/.gitignore',
            '/home/foo/src/bar-project/.git/config',
            '/home/foo/src/bar-project/py-lambda-packer.yaml',
            '/home/foo/src/bar-project/templates/images/index.png',
            '/home/foo/src/bar-project/templates/index.html',
            '/home/foo/src/bar-project/static/images/hello.png',
            '/home/foo/src/bar-project/static/images/thumb.png',
            '/home/foo/src/bar-project/static/images/large.png',
            '/home/foo/src/bar-project/static/images/large.jpg',
            '/home/foo/src/bar-project/static/images/large.gif',
            '/home/foo/src/config/global-config.json',
            '/home/foo/src/bar-project/posts/a/b/c/d/bw.html',
            '/home/foo/src/bar-project/posts/a1/b/diff/bw.html',
            '/home/foo/src/bar-project/posts/a/b/c/d/e/bar.html',
            '/home/foo/src/bar-project/posts/a/b/c/d/e/got.html',
            '/home/foo/src/bar-project/posts/a/b/c/d/tess.html',
            '/home/foo/src/bar-project/posts/a/b/c/d/tess.txt',
            '/home/foo/src/bar-project/posts/a/ref-90.html',
            '/home/foo/src/bar-project/posts/bucket/link-00.html',
            '/home/foo/src/bar-project/posts/bucket/link-03.html'):
        patcher.fs.create_file(path)

    # Directories that stay empty.
    for path in (
            '/home/foo/src/bar-project/static/css',
            '/home/foo/src/bar-project/static/js',
            '/home/foo/tmp'):
        patcher.fs.create_dir(path)

    # One symlink to a directory, one to a file target not created above.
    patcher.fs.create_symlink(
        '/home/foo/src/bar-project/posts/a/b/c/d/symlink-dir',
        '/home/foo/src/bar-project/posts/bucket')
    patcher.fs.create_symlink(
        '/home/foo/src/bar-project/posts/links/tef-90.html',
        '/home/foo/src/bar-project/posts/a/b/dpo.html')

    return patcher.fs
Example #15
0
 def testCollectValueErr(self, wr):
     """Collect() must wrap the zip writer's ValueError in logs.LogError.

     The mocked writer raises the ValueError that corresponds to
     pre-1980 ZIP timestamps; Collect is expected to translate it.
     """
     wr.side_effect = ValueError(
         'ZIP does not support timestamps before 1980')
     with Patcher() as patcher:
         # Provide a log directory with one file for Collect to archive.
         patcher.fs.create_dir(constants.SYS_LOGS_PATH)
         patcher.fs.create_file(
             os.path.join(constants.SYS_LOGS_PATH, 'log1.log'))
         with self.assertRaises(logs.LogError):
             logs.Collect(r'C:\glazier.zip')
    def test_upload_csv_to_s3_no_csvs_in_directory_fails(
            self, airflow_context, bucket_names, home_directory_res):
        """Upload raises FileNotFoundError when the directory holds no
        csv-headline files.
        """

        # Arrange

        # setup a Mock of the boto3 resources and file upload functions
        upload_client = MagicMock(spec=boto3.client('s3'))
        resource_client = MagicMock(spec=boto3.resource('s3'))
        upload_client.upload_file.side_effect = lambda: None
        resource_client.buckets.all.side_effect = lambda: bucket_names

        # get the current pipeline info
        pipeline_name = airflow_context['dag'].dag_id

        # S3 bucket to upload the file to
        bucket_name = 'tempus-challenge-csv-headlines'

        # path to fake news and csv directories
        csv_dir = os.path.join(home_directory_res, 'tempdata', pipeline_name,
                               'csv')
        news_dir = os.path.join(home_directory_res, 'tempdata', pipeline_name,
                                'news')

        # create dummy non-csv files
        full_file_path_one = os.path.join(csv_dir, 'stuff1.txt')
        full_file_path_two = os.path.join(csv_dir, 'stuff2.rtf')
        full_file_path_three = os.path.join(csv_dir, 'stuff3.doc')

        # The Patcher context manager already performs setUp on entry and
        # tearDown on exit; the original's explicit calls inside the block
        # were redundant and have been removed.
        with Patcher() as patcher:
            # create a fake filesystem directory and files to test the method
            patcher.fs.create_dir(csv_dir)
            patcher.fs.create_dir(news_dir)
            patcher.fs.create_file(full_file_path_one, contents='dummy txt')
            patcher.fs.create_file(full_file_path_two, contents='dummy rtf')
            patcher.fs.create_file(full_file_path_three, contents='dummy doc')

            # Act
            # function should raise when no csv files are present
            with pytest.raises(FileNotFoundError) as err:
                c.UploadOperations.upload_csv_to_s3(csv_dir, bucket_name,
                                                    upload_client,
                                                    resource_client,
                                                    **airflow_context)

            actual_message = str(err.value)

        # Assert
        assert "Directory has no csv-headline files" in actual_message
Example #17
0
def afs(request, monkeypatch):
    """Fake filesystem fixture for async code.

    Sets up a pyfakefs Patcher while temporarily substituting
    FakeOsModule/FakeFileOpen inside pyfakefs.fake_filesystem, routes the
    threadpool's synchronous open attribute to the patcher's fake_open,
    and registers tearDown as a finalizer.

    Returns:
        The fake filesystem, with ``fake_open`` attached for convenience.
    """
    patcher = Patcher()

    # Substitute the custom fake os/open implementations only while the
    # patcher wires itself up -- presumably these are project-specific
    # subclasses; confirm against their definitions.
    with patch.multiple('pyfakefs.fake_filesystem',
                        FakeOsModule=FakeOsModule,
                        FakeFileOpen=FakeFileOpen):
        patcher.setUp()

    attr = find_sync_open_attr()

    # Redirect synchronous opens performed via the threadpool to the fake fs.
    monkeypatch.setattr(threadpool, attr, patcher.fake_open)

    request.addfinalizer(patcher.tearDown)

    # Expose fake_open on the filesystem object for tests that need it.
    patcher.fs.fake_open = patcher.fake_open

    return patcher.fs
Example #18
0
 def test_is_necessary(self):
     """is_necessary passes on an empty systemd dir, raises once installed."""
     with Patcher() as patcher:
         os.makedirs(SYSTEMD_PATHS[0])
         # No service file present yet: must not raise.
         InstallSystemd().is_necessary()
         service_name = os.path.split(SYSTEMD_SERVICE)[1]
         installed_path = os.path.join(SYSTEMD_PATHS[0], service_name)
         patcher.fs.CreateFile(installed_path)
         with self.assertRaises(IsNecessaryException):
             InstallSystemd().is_necessary()
Example #19
0
 def test_installation(self):
     """installation() installs the config as root-owned with mode 0600."""
     with Patcher() as patcher:
         os.makedirs(os.path.dirname(CONFIG_PATH))
         patcher.fs.CreateFile(CONFIG_EXAMPLE)
         InstallConfig().installation()
         result = os.stat(CONFIG_PATH)
         # 0o100600 == regular file with owner-only read/write.
         self.assertEqual(result.st_mode, 0o100600)
         self.assertEqual(result.st_uid, 0)
         self.assertEqual(result.st_gid, 0)
Example #20
0
 def test_root_error(self, getuid_mock, file_owner_mock, file_group_mock):
     """A non-root-owned config must raise SecurityException."""
     config_path = 'amazon-dash.yml'
     with Patcher() as patcher:
         patcher.fs.CreateFile(config_path, contents=config_data)
         # Owned by an unprivileged user (uid/gid 1000) with 0660 mode.
         os.chown(config_path, 1000, 1000)
         os.chmod(config_path, 0o660)
         with self.assertRaises(SecurityException):
             Config(config_path)
     # NOTE: the Patcher context manager already calls tearDown() on exit;
     # the original's extra patcher.tearDown() after the block was redundant.
    def test_upload_csv_to_s3_non_existent_bucket_fails(
            self, airflow_context, bucket_names, home_directory_res):
        """Uploading fails if the s3 bucket location does not already exist."""

        # Arrange

        # setup a Mock of the boto3 resources and file upload functions
        upload_client = MagicMock(spec=boto3.client('s3'))
        resource_client = MagicMock(spec=boto3.resource('s3'))
        upload_client.upload_file.side_effect = lambda: None
        resource_client.buckets.all.side_effect = lambda: bucket_names

        # get the current pipeline info
        pipeline_name = airflow_context['dag'].dag_id

        # S3 bucket to upload the file to
        bucket_name = 'non-existent-bucket-name'

        # path to fake news and csv directories the function under test uses
        csv_dir = os.path.join(home_directory_res, 'tempdata', pipeline_name,
                               'csv')
        news_dir = os.path.join(home_directory_res, 'tempdata', pipeline_name,
                                'news')

        # create dummy csv files that will be uploaded by the function
        full_file_path_one = os.path.join(csv_dir, 'stuff1.csv')
        full_file_path_two = os.path.join(csv_dir, 'stuff2.csv')
        full_file_path_three = os.path.join(csv_dir, 'stuff3.csv')

        # The Patcher context manager already performs setUp on entry and
        # tearDown on exit; the original's explicit calls inside the block
        # were redundant and have been removed.
        with Patcher() as patcher:
            # create a fake filesystem directory and place the dummy csv files
            # in that directory to test the method
            patcher.fs.create_dir(csv_dir)
            patcher.fs.create_dir(news_dir)
            patcher.fs.create_file(full_file_path_one, contents='1,dummy,txt')
            patcher.fs.create_file(full_file_path_two, contents='2,dummy,rtf')
            patcher.fs.create_file(full_file_path_three, contents='3,dumy,doc')

            # Act
            # function should raise for a bucket missing on the server
            with pytest.raises(FileNotFoundError) as err:
                c.UploadOperations.upload_csv_to_s3(csv_dir, bucket_name,
                                                    upload_client,
                                                    resource_client,
                                                    **airflow_context)

            actual_message = str(err.value)

        # Assert
        assert "does not exist on the server" in actual_message
Example #22
0
    def test_load_settings(self):
        """Values from settings.yml surface in component_config.settings."""
        class TestSession(CommonSession):
            # Skip schema validation; only loading is under test here.
            validate_settings = mock.MagicMock()

        with Patcher() as patcher:
            settings_path = os.path.join(
                self.session.component_root_path(), 'settings.yml')
            patcher.fs.CreateFile(settings_path,
                                  contents='{"settings": {"test": 2}}')
            self.session = TestSession()

            self.assertEqual(
                self.session.component_config.settings['test'], 2)
    def test_clear_previous_docs(self):
        """_clear_previous_docs removes the output directory and its files."""
        with Patcher() as patcher:
            output_dir = pathlib.Path("local_docs")
            stale_doc = output_dir.joinpath("some_docs_file.html")
            patcher.fs.create_file(
                str(stale_doc), contents="This is some old documentation.",
            )
            # Creating the file implicitly created the directory.
            self.assertTrue(output_dir.is_dir())

            _clear_previous_docs(output_dir)
            self.assertFalse(output_dir.is_dir())
    def test_raises_if_file_has_too_many_lines(self):
        """A news file containing more than one line fails validation."""
        with Patcher() as patcher:
            file_path = "/foo/baz.txt"
            patcher.fs.create_file(file_path, contents="foo\nbar")

            with self.assertRaises(ValueError) as cm:
                NewsFileValidator(file_path).validate_file_contents()

            # Plain string literal: the former f-string had no placeholders.
            expected_error_message = 'News file "baz.txt" contains more than one line.'
            self.assertEqual(str(cm.exception), expected_error_message)
    def test_raises_if_file_is_empty(self):
        """An empty news file fails validation with a descriptive message."""
        with Patcher() as patcher:
            file_path = "/foo/bar.txt"
            patcher.fs.create_file(file_path, contents="")

            with self.assertRaises(ValueError) as cm:
                NewsFileValidator(file_path).validate_file_contents()

            # Plain string literal: the former f-string had no placeholders.
            expected_error_message = 'Empty news file "bar.txt".'
            self.assertEqual(str(cm.exception), expected_error_message)
Example #26
0
 def testCopyDir(self, build_info):
     """CopyDir copies every file from the source dir to the destination."""
     with Patcher() as patcher:
         patcher.fs.create_dir(r'/stage')
         patcher.fs.create_file(r'/stage/file1.txt', contents='file1')
         patcher.fs.create_file(r'/stage/file2.txt', contents='file2')
         copier = file_system.CopyDir([r'/stage', r'/root/copied'], build_info)
         copier.Validate()
         copier.Run()
         for copied in (r'/root/copied/file1.txt', r'/root/copied/file2.txt'):
             self.assertTrue(patcher.fs.exists(copied))
Example #27
0
 def test_is_not_necessary(self, mock_check_output):
     """The 'all' command reports an already-installed systemd service."""
     with Patcher() as patcher:
         patcher.fs.CreateFile(CONFIG_PATH)
         service_path = os.path.join(SYSTEMD_PATHS[0],
                                     os.path.split(SYSTEMD_SERVICE)[1])
         patcher.fs.CreateFile(service_path)
         result = CliRunner().invoke(cli, ['--root-not-required', 'all'])
         self.assertIn('Systemd service is already installed',
                       result.output)
Example #28
0
    def test_store_recovery2(self):
        """store_recovery with no pending logs must not create a recovery file.

        NOTE(review): yield-style test -- assumes an inlineCallbacks-like
        runner drives this generator.
        """
        with Patcher() as patcher:
            self.observer.session = self.session
            patcher.fs.MakeDirectory('/logs')

            # No logs queued.
            self.observer.logs = []

            yield self.observer.store_recovery()
            self.assertEqual(self.observer.logs, [])
            self.assertFalse(os.path.isfile(self.observer.recovery_file(self.session)))
Example #29
0
def fs_state():
    """Yield a paused pyfakefs Patcher for the documentation tests.

    Skips unless PyYAML is importable and Python is >= 3.6 (docs are py3
    only). The patcher is set up and immediately paused -- per pyfakefs,
    pause() restores real filesystem access until resume() is called --
    and tearDown runs after the yield.
    """
    pytest.importorskip("yaml")
    if sys.version_info < (3, 6):
        pytest.skip('docs are py3 only')
    # 'expanduser' is skipped from patching so home-dir lookups stay real.
    patcher = Patcher(additional_skip_names=['expanduser'])
    patcher.setUp()
    patcher.pause()
    yield patcher
    patcher.tearDown()
    def test_transform_headlines_to_csv_pipelineone_success(
            self, airflow_context, headline_dir_res):
        """Flattening jsons in the 'tempus_challenge_dag' headline folder
        succeeds (returns True).
        """

        # Arrange

        # Function aliases: the real dotted names exceed PEP-8's 79-character
        # line limit when used inline.
        tf_json_func = c.TransformOperations.helper_execute_json_transformation
        j_fn = c.TransformOperations.helper_execute_keyword_json_transformation
        transfm_fnc = c.TransformOperations.transform_headlines_to_csv

        # setup a Mock of the transform function dependencies
        tf_json_func_mock = MagicMock(spec=tf_json_func)
        tf_keyword_func_mock = MagicMock(spec=j_fn)
        pipeline_info_obj = MagicMock(spec=c.NewsInfoDTO)
        news_info_obj = MagicMock(spec=c.NewsInfoDTO)

        # setup the behaviors of these Mocks
        tf_json_func_mock.side_effect = lambda dir, exec_date: True
        tf_keyword_func_mock.side_effect = lambda dir, exec_date: None
        pipeline_info_obj.side_effect = lambda pipeline_name: news_info_obj
        news_info_obj.get_headlines_directory = headline_dir_res

        # create three dummy json files
        full_file_path_one = os.path.join(headline_dir_res, 'dummy1.json')
        full_file_path_two = os.path.join(headline_dir_res, 'dummy2.json')
        full_file_path_three = os.path.join(headline_dir_res, 'dummy3.json')

        # The Patcher context manager already calls setUp on entry; the
        # original's explicit patcher.setUp() inside the block was redundant.
        with Patcher() as patcher:
            # create the headlines directory the function requires to exist,
            # plus the dummy files
            patcher.fs.create_dir(headline_dir_res)
            patcher.fs.create_file(full_file_path_one)
            patcher.fs.create_file(full_file_path_two)
            patcher.fs.create_file(full_file_path_three)

            # Act
            result = transfm_fnc(pipeline_information=pipeline_info_obj,
                                 tf_json_func=tf_json_func_mock,
                                 tf_key_json_func=tf_keyword_func_mock,
                                 **airflow_context)

        # Assert
        # True indicates the transformation succeeded
        assert result is True
def test_plugin_could_not_find_jscpd_report(danger: Danger):
    """When jscpd runs but leaves no report file, the plugin records a fail."""
    with patch("subprocess.Popen", new_callable=MockPopen) as popen:
        # Both subprocess invocations succeed; no report is written because
        # the fake filesystem is empty.
        popen.set_command("which jscpd", returncode=0)
        popen.set_command("jscpd . -r json", returncode=0)

        with Patcher():
            DangerJSCPD().jscpd()

    expected = "Could not find jscpd-report.json in report directory"
    assert danger.results.fails == [Violation(message=expected)]
def fs(request):
    """Provide a pyfakefs fake filesystem, torn down after the test."""
    fake = Patcher()
    fake.setUp()
    # Restore the real filesystem even if the test fails.
    request.addfinalizer(fake.tearDown)
    return fake.fs