def fs_reload_deployer():
    """Fake filesystem fixture that reloads the contract/deploy modules."""
    reload_targets = [
        raiden_contracts.contract_manager,
        raiden_contracts.deploy.__main__,
    ]
    patcher = Patcher(modules_to_reload=reload_targets)
    patcher.setUp()
    yield patcher.fs
    patcher.tearDown()
def fs_reload_deployer() -> Generator[FakeFilesystem, None, None]:
    """Yield a fake filesystem while reloading the contract/deploy modules."""
    deploy_patcher = Patcher(
        modules_to_reload=[
            raiden_contracts.contract_manager,
            raiden_contracts.deploy.__main__,
        ]
    )
    deploy_patcher.setUp()
    yield deploy_patcher.fs
    deploy_patcher.tearDown()
class GenerateUploadBucketIterTest(TestCase):
    """Tests for generate_upload_bucket_iter against a fake filesystem."""

    def setUp(self):
        self.patcher = Patcher()
        self.patcher.setUp()
        for path in ALL_FILES:
            self.patcher.fs.create_file(path, contents='test')

    def tearDown(self):
        self.patcher.tearDown()

    def upload_files_to_path_set(self, file_iter):
        # Collect the path of every yielded upload file.
        return {entry.path for entry in file_iter}

    def test_iter_dir(self):
        found = self.upload_files_to_path_set(
            generate_upload_bucket_iter('/dummy', recursive=True))
        self.assertSetEqual(found, REGULAR_FILE_SET)

    def test_iter_dir_without_recursive(self):
        dir_path = '/dummy'
        found = self.upload_files_to_path_set(
            generate_upload_bucket_iter(dir_path, recursive=False))
        self.assertSetEqual(found, TOPLAYER_FILE_SET)

    def test_iter_dir_with_invalid_path(self):
        invalid_path = '/invalid'
        found = self.upload_files_to_path_set(
            generate_upload_bucket_iter(invalid_path))
        self.assertEqual(len(found), 0)
def fs():
    """Fake filesystem."""
    patcher = Patcher()
    patcher.setUp()
    # Point tokenize at the real open so it bypasses the fake filesystem.
    tokenize._builtin_open = patcher.original_open
    yield patcher.fs
    patcher.tearDown()
def fs_reload_example():
    """Fake filesystem that reloads the ``example`` module."""
    example_patcher = Patcher(modules_to_reload=[example])
    example_patcher.setUp()
    # Route linecache and tokenize through the real open so they read
    # actual source files while the fake filesystem is active.
    linecache.open = example_patcher.original_open
    tokenize._builtin_open = example_patcher.original_open
    yield example_patcher.fs
    example_patcher.tearDown()
def fs_state():
    """Yield a fake-filesystem patcher, set up and then paused."""
    pytest.importorskip("yaml")
    if sys.version_info < (3, 6):
        pytest.skip('docs are py3 only')
    patcher = Patcher(additional_skip_names=['expanduser'])
    patcher.setUp()
    # Start paused; the consuming test resumes patching when it needs it.
    patcher.pause()
    yield patcher
    patcher.tearDown()
def fs_state():
    """Paused fake-filesystem patcher for the yaml-dependent doc tests."""
    pytest.importorskip("yaml")
    supported = sys.version_info >= (3, 6)
    if not supported:
        pytest.skip('docs are py3 only')
    state_patcher = Patcher(additional_skip_names=['expanduser'])
    state_patcher.setUp()
    state_patcher.pause()
    yield state_patcher
    state_patcher.tearDown()
def fs(request):
    """Fake filesystem."""
    # Optional Patcher arguments may be passed via @pytest.mark.parametrize.
    _missing = object()
    params = getattr(request, 'param', _missing)
    if params is _missing:
        patcher = Patcher()
    else:
        patcher = Patcher(*params)
    patcher.setUp()
    yield patcher.fs
    patcher.tearDown()
class FileMockBase(object):
    """Test mixin that fakes the filesystem and creates a ``config.yml``.

    Subclasses override ``contents`` to control what the fake file holds.
    """

    contents = ''

    def setUp(self):
        self.patcher = Patcher()
        self.patcher.setUp()
        self.file = 'config.yml'
        # Fix: CreateFile() was deprecated and removed in pyfakefs 4.0;
        # create_file() is the supported replacement with the same semantics.
        self.patcher.fs.create_file(self.file, contents=self.contents)

    def tearDown(self):
        self.patcher.tearDown()
class TestLocalCache(unittest.TestCase):
    """Tests for LocalCache backed by a fake filesystem."""

    def setUp(self):
        self.fs_patcher = Patcher()
        self.fs_patcher.setUp()
        self.cache = LocalCache(dir=TEST_DIR)

    def tearDown(self):
        self.fs_patcher.tearDown()

    def test_clear(self):
        # Clearing a missing key must be a no-op.
        self.cache.clear(ID_DUCT, ID_STRING_NONEXISTANT)
        self.cache.set(ID_DUCT, ID_STRING, 'foo')
        self.cache.clear(ID_DUCT, ID_STRING)
        self.assertFalse(
            self.cache.has_key(ID_DUCT, ID_STRING),
            'expected not to find key if key is cleared')
        self.assertIsNone(
            self.cache.get(ID_DUCT, ID_STRING),
            'expected to get None if key has been cleared')

    def test_clear_all(self):
        self.cache.set(ID_DUCT, ID_STRING, 'foo')
        self.cache.set(ID_DUCT, ID_STRING_ANOTHER, 'bar')
        self.cache.clear_all(ID_DUCT)
        for key in (ID_STRING, ID_STRING_ANOTHER):
            self.assertFalse(
                self.cache.has_key(ID_DUCT, key),
                'expected not to find key after clear_all')

    def test_get(self):
        self.assertIsNone(
            self.cache.get(ID_DUCT, ID_STRING_NONEXISTANT),
            'expected not find non-existant key')
        self.cache.set(ID_DUCT, ID_STRING, 'foo')
        self.assertEqual(
            self.cache.get(ID_DUCT, ID_STRING), 'foo',
            'expected object retrieved from cache to be equal to its pre-cached value')

    def test_has_key(self):
        self.assertFalse(
            self.cache.has_key(ID_DUCT, ID_STRING_NONEXISTANT),
            'expected not to find non-existant key')
        self.cache.set(ID_DUCT, ID_STRING, 'foo')
        self.assertTrue(
            self.cache.has_key(ID_DUCT, ID_STRING),
            'expected to find key after setting it')

    def test_keys(self):
        pass  # TODO test keys when implemented

    def test_set(self):
        fake_serializer = mock.Mock()
        self.cache.set(ID_DUCT, ID_STRING, 'foo',
                       serializer=fake_serializer.serialize)
        fake_serializer.serialize.assert_called_once()
def set_up(cls, test=None, **kwargs_patcher):
    """Activate a fake filesystem and return the wrapping FakeFS.

    If ``test`` is given, tearing down the fake fs is registered as a
    cleanup on that test case.
    """
    global _os_path
    # Only require pyfakefs if it is actually used.
    from pyfakefs.fake_filesystem_unittest import Patcher
    patcher = Patcher(**kwargs_patcher)
    # Capture the real os.path before patching so "os.path.isfile()" etc.
    # remain reachable through the module-level _os_path afterwards.
    real_os_path = os.path
    patcher.setUp()
    _os_path = real_os_path
    faked = FakeFS(patcher)
    if test:
        test.addCleanup(faked.tear_down)
    return faked
class TestConfigParserWrapper(unittest.TestCase):
    """Tests for ConfigParserWrapper.read with path, string and dict input."""

    GOOD_CONFIG = ("[TestSection]\n"
                   "item1=answer1\n"
                   "item2=answer2\n")

    def setUp(self):
        self.patcher = Patcher()
        self.patcher.setUp()
        self.wrapper = ConfigParserWrapper()

    def tearDown(self):
        self.patcher.tearDown()

    def _assert_test_section(self):
        # Shared assertions for the parsed TestSection.
        self.assertEqual(['TestSection'], self.wrapper.sections())
        self.assertEqual('answer1', self.wrapper.get('TestSection', 'item1'))
        self.assertEqual('answer2', self.wrapper.get('TestSection', 'item2'))

    def test_read_normal_file_success(self):
        self.patcher.fs.create_file('/foo/bar', contents=self.GOOD_CONFIG)
        self.wrapper.read('/foo/bar')
        self._assert_test_section()

    def test_read_string_file_success(self):
        self.wrapper.read(self.GOOD_CONFIG)
        self._assert_test_section()

    def test_read_dictionary_file_success(self):
        self.wrapper.read({
            'TestSection': {
                'item1': 'answer1',
                'item2': 'answer2',
            }
        })
        self._assert_test_section()

    # NOTE(review): disabled test kept verbatim from the original source.
    '''
    def test_read_file_object_success(self):
        good_config = ("[TestSection]\n"
                       "item1=answer1\n"
                       "item2=answer2\n")
        self.patcher.fs.create_file('/foo/bar', contents=good_config)
        with open('/foo/bar', 'r') as file_:
            self.wrapper.read(file_)
        self.assertEqual(['TestSection'], self.wrapper.sections())
        self.assertEqual('answer1', self.wrapper.get('TestSection', 'item1'))
        self.assertEqual('answer2', self.wrapper.get('TestSection', 'item2'))
    '''
class GetCompressedFileTest(TestCase):
    """Tests for get_compressed_file's archive-type detection."""

    def setUp(self):
        self.patcher = Patcher()
        self.patcher.setUp()

    def tearDown(self):
        self.patcher.tearDown()

    def assertFileExists(self, file_path):
        self.assertTrue(os.path.exists(file_path),
                        msg='File {0} does not exist'.format(file_path))

    def test_zip_file(self):
        self.patcher.fs.create_file('zip_file.txt', contents='test')
        with zipfile.ZipFile('zip_file.zip', 'w') as archive:
            archive.write('zip_file.txt')
        self.assertFileExists('zip_file.zip')
        cf = get_compressed_file('zip_file.zip')
        self.assertIsNotNone(cf, msg='test_zip_file faild: cf is none')
        self.assertEqual(
            cf.extension_name, ZIPFile.extension_name,
            msg='test_zip_file faild: ext={}'.format(cf.extension_name))

    def test_tgz_file(self):
        self.patcher.fs.create_file('tar_file.tar.gz')
        with tarfile.open('tar_file.tar.gz', 'w:gz') as archive:
            archive.close()
        self.assertFileExists('tar_file.tar.gz')
        cf = get_compressed_file('tar_file.tar.gz')
        self.assertIsNotNone(cf, msg='test_tar_file faild: cf is none')
        self.assertEqual(
            cf.extension_name, TGZFile.extension_name,
            msg='test_tar_file faild: ext={}'.format(cf.extension_name))

    def test_tar_file(self):
        self.patcher.fs.create_file('tar_file.tar')
        with tarfile.open('tar_file.tar', 'w:') as archive:
            archive.close()
        self.assertFileExists('tar_file.tar')
        cf = get_compressed_file('tar_file.tar')
        self.assertIsNotNone(cf, msg='test_tar_file faild: cf is none')
        self.assertEqual(
            cf.extension_name, TARFile.extension_name,
            msg='test_tar_file faild: ext={}'.format(cf.extension_name))
class UploadFileSpecTest(TestCase):
    """Tests for UploadFileSpec.parse using a fake filesystem."""

    def setUp(self):
        self.fakefs_patcher = Patcher()
        self.fakefs_patcher.setUp()

    def tearDown(self):
        self.fakefs_patcher.tearDown()

    def parse_string(self, contents):
        # Write `contents` to a randomly-named fake file and parse it.
        name = '/file_spec/test_{}.json'.format(random.randrange(100))
        self.fakefs_patcher.fs.create_file(name, contents=contents)
        return UploadFileSpec.parse(name)

    def test_parse_file_not_found(self):
        self.assertRaises(FileNotFoundError,
                          lambda: UploadFileSpec.parse("unknown file"))

    def test_parse_file_invalid_json(self):
        self.assertRaises(FileSpecFormatError,
                          lambda: self.parse_string('{'))

    def test_parse_file_contents_must_be_array(self):
        self.assertRaises(FileSpecFormatError,
                          lambda: self.parse_string('{}'))

    def test_parse_file_item_validation(self):
        self.assertRaises(FileSpecFormatError,
                          lambda: self.parse_string('{1}'))

    def test_empty(self):
        self.assertSetEqual(self.parse_string('[]').paths, set())

    def test_file_no_metadata(self):
        spec = self.parse_string(r"""[ {"file":"test.txt"} ]""")
        self.assertSetEqual(spec.paths, {'test.txt'})

    def test_file_metadata(self):
        spec = self.parse_string(
            r"""[ { "file":"test.csv", "metadata": { "a": "b", "c": 1 } } ]""")
        self.assertSetEqual(spec.paths, {'test.csv'})
        self.assertDictEqual(spec.get_metadata('test.csv'),
                             {'a': 'b', 'c': 1})
class TestFilesystemFinder(TestCase):
    """Tests for templates_for_engine with the filesystem (DIRS) loader."""

    patcher = None

    def setUp(self):
        self.patcher = Patcher()
        self.patcher.setUp()
        # Fix: CreateFile() was deprecated and removed in pyfakefs 4.0;
        # create_file() is the supported replacement.
        self.patcher.fs.create_file('/tmp/project/templates/base.html')
        self.patcher.fs.create_file('/tmp/project/templates/foo/bar.html')
        self.patcher.fs.create_file('/tmp/project/other_templates/baz.html')

    def tearDown(self):
        self.patcher.tearDown()

    def test_nothing_to_do(self):
        """
        With neither filesystem nor app directories to search, there can
        be no templates.
        """
        self.assertEqual(
            list(
                template_finder.templates_for_engine({
                    'BACKEND': 'django.templates.backends.jinja2.Jinja2',
                    'APP_DIRS': False,
                    'DIRS': []
                })), [])

    def test_filesystem_loader(self):
        """
        Using the filesystem loader, templates are found within the
        directories specified in DIRS for the given engine config.
        """
        self.assertEqual(
            list(
                template_finder.templates_for_engine({
                    'BACKEND':
                    'django.templates.backends.django.Djangotemplate.',
                    'APP_DIRS': False,
                    'DIRS': [
                        '/tmp/project/templates/',
                        '/tmp/project/other_templates/'
                    ]
                })), [
                    ('base.html', '/tmp/project/templates/base.html'),
                    ('foo/bar.html', '/tmp/project/templates/foo/bar.html'),
                    ('baz.html', '/tmp/project/other_templates/baz.html'),
                ])
class GenerateUploadFileIterTest(TestCase):
    """Tests for generate_upload_file_iter against a fake filesystem."""

    def setUp(self):
        self.patcher = Patcher()
        self.patcher.setUp()
        for path in ALL_FILES:
            self.patcher.fs.create_file(path, contents='test')

    def tearDown(self):
        self.patcher.tearDown()

    def upload_files_to_path_set(self, file_iter):
        # Collect the path of every yielded upload file.
        return {entry.path for entry in file_iter}

    def test_iter_dir(self):
        found = self.upload_files_to_path_set(
            generate_upload_file_iter(['/dummy'], recursive=True))
        self.assertSetEqual(found, REGULAR_FILE_SET)

    def test_iter_dir_hidden(self):
        found = self.upload_files_to_path_set(
            generate_upload_file_iter(['/dummy'], recursive=True,
                                      ignore_hidden_files=False))
        self.assertSetEqual(found, ALL_FILES)

    def test_iter_file(self):
        target = '/dummy/file1.txt'
        found = self.upload_files_to_path_set(
            generate_upload_file_iter([target]))
        self.assertSetEqual(found, {target})

    def test_iter_dir_with_reject(self):
        dir_path = '/dummy'
        file_iter = generate_upload_file_iter([dir_path], recursive=False)
        with self.assertRaises(InvalidPathException) as context:
            list(file_iter)
        assert context.exception.path == dir_path

    def test_iter_dir_with_invalid_path(self):
        invalid_path = '/invalid'
        file_iter = generate_upload_file_iter([invalid_path])
        with self.assertRaises(InvalidPathException) as context:
            list(file_iter)
        assert context.exception.path == invalid_path
def source_fs(request):
    """Fake source tree fixture for the bar-project packaging tests."""
    patcher = Patcher()
    patcher.setUp()
    request.addfinalizer(patcher.tearDown)
    # Regular files in the fake project tree.
    for path in (
            '/home/foo/src/bar-project/config.json',
            '/home/foo/src/bar-project/.gitignore',
            '/home/foo/src/bar-project/.git/config',
            '/home/foo/src/bar-project/py-lambda-packer.yaml',
            '/home/foo/src/bar-project/templates/images/index.png',
            '/home/foo/src/bar-project/templates/index.html',
            '/home/foo/src/bar-project/static/images/hello.png',
            '/home/foo/src/bar-project/static/images/thumb.png',
            '/home/foo/src/bar-project/static/images/large.png',
            '/home/foo/src/bar-project/static/images/large.jpg',
            '/home/foo/src/bar-project/static/images/large.gif',
            '/home/foo/src/config/global-config.json',
            '/home/foo/src/bar-project/posts/a/b/c/d/bw.html',
            '/home/foo/src/bar-project/posts/a1/b/diff/bw.html',
            '/home/foo/src/bar-project/posts/a/b/c/d/e/bar.html',
            '/home/foo/src/bar-project/posts/a/b/c/d/e/got.html',
            '/home/foo/src/bar-project/posts/a/b/c/d/tess.html',
            '/home/foo/src/bar-project/posts/a/b/c/d/tess.txt',
            '/home/foo/src/bar-project/posts/a/ref-90.html',
            '/home/foo/src/bar-project/posts/bucket/link-00.html',
            '/home/foo/src/bar-project/posts/bucket/link-03.html',
    ):
        patcher.fs.create_file(path)
    # Empty directories.
    for path in (
            '/home/foo/src/bar-project/static/css',
            '/home/foo/src/bar-project/static/js',
            '/home/foo/tmp',
    ):
        patcher.fs.create_dir(path)
    # Symlinks (one to a directory, one to a file path).
    patcher.fs.create_symlink(
        '/home/foo/src/bar-project/posts/a/b/c/d/symlink-dir',
        '/home/foo/src/bar-project/posts/bucket')
    patcher.fs.create_symlink(
        '/home/foo/src/bar-project/posts/links/tef-90.html',
        '/home/foo/src/bar-project/posts/a/b/dpo.html')
    return patcher.fs
def global_fs(request):
    """Long-lived fake filesystem, shared across tests.

    pyfakefs's setup takes a couple seconds (or maybe it's the profiler
    recording 2148994 function calls) as it scans all modules in the venv
    and all the functions in them to see if they open files (so that it
    can patch them).  So we initialize pyfakefs only once and reset the
    filesystem (a cheap operation) after every test instead.
    """
    from pyfakefs.fake_filesystem_unittest import Patcher
    import tokenize
    patcher = Patcher()
    patcher.setUp()
    # Let tokenize use the real open while the fake fs is active.
    tokenize._builtin_open = patcher.original_open
    try:
        yield patcher.fs
    finally:
        patcher.tearDown()
def afs(request, monkeypatch):
    """Fake filesystem."""
    patcher = Patcher()
    with patch.multiple('pyfakefs.fake_filesystem',
                        FakeOsModule=FakeOsModule,
                        FakeFileOpen=FakeFileOpen):
        patcher.setUp()
        # Redirect the threadpool's open to the fake one.
        open_attr = find_sync_open_attr()
        monkeypatch.setattr(threadpool, open_attr, patcher.fake_open)
        request.addfinalizer(patcher.tearDown)
        # Expose fake_open on the fs object for convenience in tests.
        patcher.fs.fake_open = patcher.fake_open
        return patcher.fs
def fs_reload_example(request):
    """Fake filesystem that reloads the ``example`` module."""
    reload_patcher = Patcher(modules_to_reload=[example])
    reload_patcher.setUp()
    # Registered after setUp so teardown only runs if setup succeeded.
    request.addfinalizer(reload_patcher.tearDown)
    return reload_patcher.fs
def fs():
    """Fake filesystem.

    Fixes: the original yielded no value (None), unlike every other ``fs``
    fixture in this file which yields the fake filesystem object, and it
    skipped tearDown if the consuming test raised.  Yielding ``patcher.fs``
    is backward-compatible for tests that ignore the fixture value.
    """
    patcher = Patcher()
    patcher.setUp()
    try:
        yield patcher.fs
    finally:
        # Always restore the real filesystem, even on test failure.
        patcher.tearDown()
def fs(request):
    """Fake filesystem."""
    fs_patcher = Patcher()
    fs_patcher.setUp()
    # Restore the real filesystem when the test finishes.
    request.addfinalizer(fs_patcher.tearDown)
    return fs_patcher.fs
class GalleryPhotoModelTests(TestCase):
    """Tests for GalleryPhoto upload, EXIF timestamp and rotation handling."""

    def setUp(self):
        # Use pyfakefs for filesystem access
        self.fs_patcher = Patcher()
        self.fs_patcher.setUp()
        # Mock away things that pyfakefs doesn't handle
        self.flock_patcher = mock.patch('fcntl.flock')
        self.flock_patcher.start()

    def tearDown(self):
        self.fs_patcher.tearDown()
        self.flock_patcher.stop()

    def test_photo_upload(self):
        jpeg_data = self._create_jpeg_data()
        gallery = Gallery.objects.create(title='Test Gallery')
        GalleryPhoto.objects.create(
            photo=SimpleUploadedFile('test.jpg', jpeg_data), gallery=gallery)
        # There should be one photo
        gallery_photos = gallery.galleryphoto_set.all()
        self.assertEqual(1, len(gallery_photos))
        # Which is the one just uploaded
        photo = gallery_photos[0]
        self.assertEqual('test.jpg', str(photo))
        self.assertIsNone(photo.date)

    def test_photo_timestamp(self):
        # Create image with EXIF timestamp
        timestamp = timezone.make_aware(datetime(2015, 8, 15))
        exif = {'Exif': {piexif.ExifIFD.DateTimeOriginal:
                         timestamp.strftime('%Y:%m:%d %H:%M:%S')}}
        jpeg_data = self._create_jpeg_data(exif=exif)
        # Save image to gallery
        gallery = Gallery.objects.create(title='Test Gallery')
        photo = GalleryPhoto.objects.create(
            photo=SimpleUploadedFile('test.jpg', jpeg_data), gallery=gallery)
        self.assertEqual(timestamp, photo.date)

    @mock.patch('website.models.logger')
    def test_invalid_orientation_tag(self, mock_logger):
        gallery = Gallery.objects.create(title='Test Gallery')
        jpeg_data = self._create_jpeg_data(
            exif={'0th': {piexif.ImageIFD.Orientation: 666}})
        GalleryPhoto.objects.create(
            photo=SimpleUploadedFile('test.jpg', jpeg_data), gallery=gallery)
        # The photo should still be saved
        gallery_photos = gallery.galleryphoto_set.all()
        self.assertEqual(1, len(gallery_photos))
        mock_logger.warning.assert_called_with('Unexpected orientation: 666')

    def test_photo_rotation(self):
        gallery = Gallery.objects.create(title='Test Gallery')
        for rotation in range(1, 9):
            # Create image with EXIF rotation info
            jpeg_data = self._create_jpeg_data(
                exif={'0th': {piexif.ImageIFD.Orientation: rotation}})
            # Save image to gallery
            photo = GalleryPhoto.objects.create(
                photo=SimpleUploadedFile('test.jpg', jpeg_data),
                gallery=gallery)
            # Read and verify uploaded photos orientation tag
            byte_io = BytesIO(photo.photo.read())
            img = Image.open(byte_io)
            exif_dict = piexif.load(img.info['exif'])
            self.assertIn(piexif.ImageIFD.Orientation, exif_dict['0th'])
            self.assertEqual(exif_dict['0th'][piexif.ImageIFD.Orientation], 1)

    def _create_jpeg_data(self, exif=None):
        """Return JPEG bytes for a 10x10 image, optionally with EXIF data.

        Fix: ``exif`` previously defaulted to a mutable ``{}`` (shared
        across calls — classic mutable-default pitfall).  ``None`` is
        behaviorally identical here because an empty/falsy ``exif`` was
        already treated as "no EXIF".
        """
        byte_io = BytesIO()
        img = Image.new('RGB', (10, 10))
        if exif:
            img.save(byte_io, 'JPEG', exif=piexif.dump(exif))
        else:
            img.save(byte_io, 'JPEG')
        return byte_io.getvalue()
def fs(request):
    """Fake filesystem fixture; torn down via a request finalizer."""
    local_patcher = Patcher()
    local_patcher.setUp()
    request.addfinalizer(local_patcher.tearDown)
    return local_patcher.fs
def fs(request):
    """Fake filesystem."""
    p = Patcher()
    p.setUp()
    request.addfinalizer(p.tearDown)
    return p.fs
class TestTemplateReader(TestCase):
    """Tests for to_tree, which builds a hierarchy from {% extends %} tags."""

    patcher = None

    def setUp(self):
        self.patcher = Patcher()
        self.patcher.setUp()

    def tearDown(self):
        self.patcher.tearDown()

    def test_nothing_to_do(self):
        self.assertEqual(to_tree([], 'banana'), {'name': 'banana'})

    def test_flat(self):
        """
        If none of the templates have {% extends %}, the result will be flat
        """
        # Fix: CreateFile() was deprecated and removed in pyfakefs 4.0;
        # create_file() is the supported replacement (here and below).
        self.patcher.fs.create_file('/tmp/project/templates/base.html')
        self.patcher.fs.create_file('/tmp/project/templates/foo/bar.html',
                                    contents='hello dolly')
        self.patcher.fs.create_file('/tmp/project/other_templates/baz.html')
        self.assertEqual(
            to_tree([
                ('base.html', '/tmp/project/templates/base.html'),
                ('foo/bar.html', '/tmp/project/templates/foo/bar.html'),
                ('baz.html', '/tmp/project/other_templates/baz.html'),
            ], 'banana'), {
                'name': 'banana',
                'children': [{
                    'name': 'base.html'
                }, {
                    'name': 'baz.html'
                }, {
                    'name': 'foo/bar.html'
                }],
            })

    def test_extends(self):
        """ If {%extends%} contains a string, a tree will be formed """
        self.patcher.fs.create_file('/tmp/project/templates/base.html')
        self.patcher.fs.create_file('/tmp/project/templates/foo/bar.html',
                                    contents='{% extends "base.html"%}')
        self.patcher.fs.create_file('/tmp/project/other_templates/baz.html',
                                    contents='{% extends "base.html"%}')
        self.assertEqual(
            to_tree([
                ('base.html', '/tmp/project/templates/base.html'),
                ('foo/bar.html', '/tmp/project/templates/foo/bar.html'),
                ('baz.html', '/tmp/project/other_templates/baz.html'),
            ], 'banana'), {
                'name': 'banana',
                'children': [
                    {
                        'name': 'base.html',
                        'children': [
                            {'name': 'baz.html'},
                            {'name': 'foo/bar.html'},
                        ]
                    },
                ],
            })

    def test_variable_extends(self):
        """
        If {%extends%} contains a variable, it becomes a child of __unknown__
        """
        self.patcher.fs.create_file('/tmp/project/templates/base.html')
        self.patcher.fs.create_file('/tmp/project/templates/foo/bar.html',
                                    contents='{% extends base %}')
        self.patcher.fs.create_file('/tmp/project/other_templates/baz.html')
        self.assertEqual(
            to_tree([
                ('base.html', '/tmp/project/templates/base.html'),
                ('foo/bar.html', '/tmp/project/templates/foo/bar.html'),
            ], 'banana'), {
                'name': 'banana',
                'children': [{
                    'name': '__unknown__',
                    'children': [{'name': 'foo/bar.html'}]
                }, {
                    'name': 'base.html'
                }],
            })

    def test_bad_extends(self):
        """ If extends is not the first tag, it is ignored. """
        self.patcher.fs.create_file('/tmp/project/templates/base.html')
        self.patcher.fs.create_file(
            '/tmp/project/templates/foo/bar.html',
            contents=
            "hello {% firstof world dolly sailor %} {% extends 'base.html'%}")
        self.assertEqual(
            to_tree([
                ('base.html', '/tmp/project/templates/base.html'),
                ('foo/bar.html', '/tmp/project/templates/foo/bar.html'),
            ], 'banana'), {
                'name': 'banana',
                'children': [{
                    'name': 'base.html'
                }, {
                    'name': 'foo/bar.html'
                }],
            })
class TestMultitasking(unittest.TestCase):
    """Tests for the multitasking link-checking helpers.

    NOTE(review): setUp performs a real network request to ``urlLink``;
    consider mocking it so the suite is hermetic — TODO confirm with owner.
    """

    def setUp(self):
        r = requests.get(urlLink, timeout=1)
        urlString = r.content.decode()
        numberOfValidLinks_t = [0]
        validLinks_t = ['']
        unvalidLinks_t = ['']
        urlList = str.splitlines(urlString)
        multitasking.findNumberOfValidLinks(
            urlList[:15], numberOfValidLinks_t, validLinks_t,
            unvalidLinks_t, 0)
        self.patcher = Patcher()
        self.patcher.setUp()
        self.workTask_1 = WorkTask()
        self.workTask_1.cpuNumber = 1
        self.workTask_1.startCount = 0
        self.workTask_1.endCount = len(validLinks_t[0])
        self.workTask_1.workList = validLinks_t[0]
        self.directory = 'testingDir'
        self.patcher.fs.create_dir(self.directory)
        self.workNumbers = multitasking.divideWorkload(
            self.workTask_1.workList, 3)

    def tearDown(self):
        self.patcher.tearDown()

    # TODO: Mock filesystem to create and delete test-files.
    def test_create_file(self):
        file_path = self.directory
        self.assertTrue(os.path.exists(file_path))
        # self.fs.create_file(file_path)
        # self.assertTrue(os.path.exists(file_path))

    def test_findNumberOfValidLinks(self):
        # TODO: Refactor Do setup in a setUp method.
        numberOfValidLinks_t = [0]
        validLinks_t = ['']
        unvalidLinks_t = ['']
        try:
            multitasking.findNumberOfValidLinks(
                self.workTask_1.workList, numberOfValidLinks_t,
                validLinks_t, unvalidLinks_t, 0)
        except Exception as e:
            # Fix: the original printed the exception AFTER self.fail(),
            # which raises — the print was unreachable.  Include the
            # exception in the failure message instead.
            self.fail('Unexpected exception\n\n {}'.format(e))
        else:
            self.assertEqual(numberOfValidLinks_t[0],
                             self.workTask_1.endCount)
            self.assertEqual(len(validLinks_t[0]), self.workTask_1.endCount)

    def test_divideWorkLoad(self):
        testList = self.workTask_1.workList
        cpuNumber = len(testList)
        # Testing edge case when there are as many items in list as cores.
        ret = multitasking.divideWorkload(testList, cpuNumber)
        self.assertEqual(len(ret), len(testList))
        self.assertEqual(len(testList), sum(ret))
        for number in ret:
            self.assertLessEqual(number, len(testList) / cpuNumber + 1)
        # Testing that only one list element is returned when 1 core.
        cpuNumber = 1
        ret = multitasking.divideWorkload(testList, cpuNumber)
        self.assertEqual(len(ret), 1)
        self.assertEqual(ret[0], len(testList))
        self.assertEqual(len(testList), sum(ret))
        for number in ret:
            self.assertLessEqual(number, len(testList) / cpuNumber + 1)
        # Testing edge case when there are more cores than items.
        cpuNumber = len(testList) + 3
        ret = multitasking.divideWorkload(testList, cpuNumber)
        self.assertEqual(len(testList), sum(ret))
        for number in ret:
            self.assertLessEqual(number, len(testList) / cpuNumber + 1)

    def test_assignWorkTasks(self):
        ret = multitasking.assignWorkTasks(
            self.workNumbers, self.workTask_1.workList, self.directory)
        self.assertEqual(len(ret), len(self.workNumbers))

    def test_saveImages(self):
        pass

# Fix: a dangling, unterminated triple-quote followed the class in the
# original (leftover opener of commented-out code) — removed, since an
# unterminated string literal is a SyntaxError.
def fs_reload_example():
    """Fake filesystem, reloading ``example`` so it sees the patched modules."""
    patcher = Patcher(modules_to_reload=[example])
    patcher.setUp()
    yield patcher.fs
    patcher.tearDown()
class TestAppFinder(TestCase):
    """Tests for templates_for_engine using the app-directories loader."""

    patcher = None
    engine_config = {
        'BACKEND':
        'django.templates.backends.django.Djangotemplate_tree_finder.',
        'APP_DIRS': True,
    }

    def setUp(self):
        self.patcher = Patcher()
        self.patcher.setUp()  # called in the initialization code
        # app files
        # Fix: CreateFile() was deprecated and removed in pyfakefs 4.0;
        # create_file() is the supported replacement.
        self.patcher.fs.create_file('/tmp/project/project/templates/abc.html')
        self.patcher.fs.create_file(
            '/tmp/project/my_app/templates/my_app/def.html')
        self.patcher.fs.create_file(
            '/tmp/project/your_app/templates/your_app/def.html')
        self.mock_apps = Apps(
            collections.OrderedDict([
                ('project', AppConfig('/tmp/project/project/')),
                ('my_app', AppConfig('/tmp/project/my_app/')),
                ('your_app', AppConfig('/tmp/project/your_app/'))
            ]))

    def tearDown(self):
        self.patcher.tearDown()  # somewhere in the cleanup code

    def test_app_loader(self):
        """
        Using the app loader, templates are found within the directories
        corresponding to the apps for the given engine config.
        """
        with mock.patch('template_tree.template_finder.apps',
                        new=self.mock_apps):
            self.assertEqual(
                list(
                    template_finder.templates_for_engine(self.engine_config)),
                [
                    ('abc.html', '/tmp/project/project/templates/abc.html'),
                    ('my_app/def.html',
                     '/tmp/project/my_app/templates/my_app/def.html'),
                    ('your_app/def.html',
                     '/tmp/project/your_app/templates/your_app/def.html'),
                ])

    def test_exclude_apps(self):
        """ exclude_apps excludes templates from the given apps """
        with mock.patch('template_tree.template_finder.apps',
                        new=self.mock_apps):
            self.assertEqual(
                list(
                    template_finder.templates_for_engine(
                        self.engine_config, ['my_app', 'your_app'])),
                [('abc.html', '/tmp/project/project/templates/abc.html')])

    def test_default_app_exclusion(self):
        """
        By default, the admin app is excluded.
        """
        mock_apps = Apps(
            collections.OrderedDict([
                ('project', AppConfig('/tmp/project/project/')),
                ('admin', AppConfig('/tmp/project/my_app/')),
                ('your_app', AppConfig('/tmp/project/your_app/'))
            ]))
        with mock.patch('template_tree.template_finder.apps', new=mock_apps):
            self.assertEqual(
                list(
                    template_finder.templates_for_engine(self.engine_config)),
                [
                    ('abc.html', '/tmp/project/project/templates/abc.html'),
                    ('your_app/def.html',
                     '/tmp/project/your_app/templates/your_app/def.html'),
                ])

    def test_include_admin_apps(self):
        """
        The admin app can be included in the report, by providing an
        empty list
        """
        mock_apps = Apps(
            collections.OrderedDict([
                ('project', AppConfig('/tmp/project/project/')),
                ('admin', AppConfig('/tmp/project/my_app/')),
                ('your_app', AppConfig('/tmp/project/your_app/'))
            ]))
        with mock.patch('template_tree.template_finder.apps', new=mock_apps):
            self.assertEqual(
                list(
                    template_finder.templates_for_engine(
                        self.engine_config, [])),
                [
                    ('abc.html', '/tmp/project/project/templates/abc.html'),
                    ('my_app/def.html',
                     '/tmp/project/my_app/templates/my_app/def.html'),
                    ('your_app/def.html',
                     '/tmp/project/your_app/templates/your_app/def.html'),
                ])