def setUp(self):
    """Creates two named workspaces and logs the test client in."""
    django.setup()
    self.workspace1 = storage_test_utils.create_workspace(name='ws1')
    self.workspace2 = storage_test_utils.create_workspace(name='ws2')
    rest.login_client(self.client)
def test_success(self, mock_rmdir, mock_exists, mock_workspaces):
    """Tests calling ScaleFileManager.cleanup_download_dir() successfully"""
    download_dir = os.path.join('download', 'dir')
    work_dir = os.path.join('work', 'dir')

    # Two workspaces with mocked cleanup hooks
    workspace_1 = storage_test_utils.create_workspace()
    workspace_1.cleanup_download_dir = MagicMock()
    workspace_1_work_dir = ScaleFile.objects._get_workspace_work_dir(work_dir, workspace_1)
    workspace_2 = storage_test_utils.create_workspace()
    workspace_2.cleanup_download_dir = MagicMock()
    workspace_2_work_dir = ScaleFile.objects._get_workspace_work_dir(work_dir, workspace_2)

    # Only workspace 1's work dir "exists" on disk
    mock_exists.side_effect = lambda path: path == workspace_1_work_dir
    mock_workspaces.side_effect = lambda: [workspace_1, workspace_2]

    ScaleFile.objects.cleanup_download_dir(download_dir, work_dir)

    workspace_1.cleanup_download_dir.assert_called_once_with(download_dir, workspace_1_work_dir)
    mock_rmdir.assert_called_once_with(workspace_1_work_dir)
    # Workspace 2 should not be cleaned up because os.path.exists() returns false
    self.assertFalse(workspace_2.cleanup_download_dir.called)
def test_execute(self):
    """Tests executing a CreateIngest message """
    workspace_1 = storage_test_utils.create_workspace()
    workspace_2 = storage_test_utils.create_workspace()

    # A source file already sitting in the first workspace
    source_file = ScaleFile.objects.create(file_name='input_file', file_type='SOURCE',
                                           media_type='text/plain', file_size=10,
                                           data_type_tags=['type1'], file_path='the_path',
                                           workspace=workspace_1)
    # An in-flight ingest moving that file into the second workspace
    ingest = Ingest.objects.create(file_name='input_file', file_size=10, status='TRANSFERRING',
                                   bytes_transferred=10, transfer_started=now(),
                                   media_type='text/plain', ingest_started=now(), data_started=now(),
                                   workspace=workspace_1, new_workspace=workspace_2,
                                   data_type_tags=['type1'], source_file=source_file)

    message = CreateIngest()
    message.create_ingest_type = 'strike_job'
    message.strike_id = 1
    message.ingest_id = ingest.id

    self.assertTrue(message.execute())
def test_deleted_file(self):
    """Tests calling ScaleFileManager.move_files() with a deleted file"""
    work_dir = os.path.join('work', 'dir')

    workspace_1 = storage_test_utils.create_workspace()
    file_1 = storage_test_utils.create_file(file_name='my_file_1.txt', workspace=workspace_1)
    new_workspace_path_1 = os.path.join('my', 'new', 'path', '1', os.path.basename(file_1.file_path))
    file_2 = storage_test_utils.create_file(file_name='my_file_2.txt', workspace=workspace_1)
    # Mark the second file deleted so the move should be rejected
    file_2.is_deleted = True
    file_2.save()
    new_workspace_path_2 = os.path.join('my', 'new', 'path', '2', os.path.basename(file_2.file_path))
    workspace_1.move_files = MagicMock()
    workspace_1_work_dir = ScaleFile.objects._get_workspace_work_dir(work_dir, workspace_1)

    workspace_2 = storage_test_utils.create_workspace()
    workspace_2.is_active = False
    workspace_2.save()
    file_3 = storage_test_utils.create_file(file_name='my_file_3.txt', workspace=workspace_2)
    new_workspace_path_3 = os.path.join('my', 'new', 'path', '3', os.path.basename(file_3.file_path))
    file_4 = storage_test_utils.create_file(file_name='my_file_4.txt', workspace=workspace_2)
    new_workspace_path_4 = os.path.join('my', 'new', 'path', '4', os.path.basename(file_4.file_path))
    workspace_2.move_files = MagicMock()
    workspace_2_work_dir = ScaleFile.objects._get_workspace_work_dir(work_dir, workspace_2)

    files = [(file_1, new_workspace_path_1), (file_2, new_workspace_path_2),
             (file_3, new_workspace_path_3), (file_4, new_workspace_path_4)]
    self.assertRaises(DeletedFile, ScaleFile.objects.move_files, work_dir, files)
def test_success(self):
    """Tests calling ScaleFileManager.download_files() successfully"""
    # Three files in the first workspace
    workspace_1 = storage_test_utils.create_workspace()
    file_1 = storage_test_utils.create_file(workspace=workspace_1)
    local_path_1 = '/my/local/path/file.txt'
    file_2 = storage_test_utils.create_file(workspace=workspace_1)
    local_path_2 = '/another/local/path/file.txt'
    file_3 = storage_test_utils.create_file(workspace=workspace_1)
    local_path_3 = '/another/local/path/file.json'
    workspace_1.setup_download_dir = MagicMock()
    workspace_1.download_files = MagicMock()

    # Two files in the second workspace
    workspace_2 = storage_test_utils.create_workspace()
    file_4 = storage_test_utils.create_file(workspace=workspace_2)
    local_path_4 = '/my/local/path/4/file.txt'
    file_5 = storage_test_utils.create_file(workspace=workspace_2)
    local_path_5 = '/another/local/path/5/file.txt'
    workspace_2.setup_download_dir = MagicMock()
    workspace_2.download_files = MagicMock()

    files = [FileDownload(file_1, local_path_1, False), FileDownload(file_2, local_path_2, False),
             FileDownload(file_3, local_path_3, False), FileDownload(file_4, local_path_4, False),
             FileDownload(file_5, local_path_5, False)]
    ScaleFile.objects.download_files(files)

    # Downloads must be grouped per workspace
    workspace_1.download_files.assert_called_once_with([FileDownload(file_1, local_path_1, False),
                                                        FileDownload(file_2, local_path_2, False),
                                                        FileDownload(file_3, local_path_3, False)])
    workspace_2.download_files.assert_called_once_with([FileDownload(file_4, local_path_4, False),
                                                        FileDownload(file_5, local_path_5, False)])
def test_deleted_file(self):
    """Tests calling ScaleFileManager.download_files() with a deleted file"""
    download_dir = os.path.join('download', 'dir')
    work_dir = os.path.join('work', 'dir')

    workspace_1 = storage_test_utils.create_workspace()
    file_1 = storage_test_utils.create_file(workspace=workspace_1)
    local_path_1 = u'my/local/path/file.txt'
    file_2 = storage_test_utils.create_file(workspace=workspace_1)
    local_path_2 = u'another/local/path/file.txt'
    # Deleting this file should make the whole download call fail
    file_2.is_deleted = True
    file_2.save()
    file_3 = storage_test_utils.create_file(workspace=workspace_1)
    local_path_3 = u'another/local/path/file.json'
    workspace_1.download_files = MagicMock()
    workspace_1_work_dir = ScaleFile.objects._get_workspace_work_dir(work_dir, workspace_1)

    workspace_2 = storage_test_utils.create_workspace()
    file_4 = storage_test_utils.create_file(workspace=workspace_2)
    local_path_4 = u'my/local/path/4/file.txt'
    file_5 = storage_test_utils.create_file(workspace=workspace_2)
    local_path_5 = u'another/local/path/5/file.txt'
    workspace_2.download_files = MagicMock()
    workspace_2_work_dir = ScaleFile.objects._get_workspace_work_dir(work_dir, workspace_2)

    files = [(file_1, local_path_1), (file_2, local_path_2), (file_3, local_path_3),
             (file_4, local_path_4), (file_5, local_path_5)]
    self.assertRaises(DeletedFile, ScaleFile.objects.download_files, download_dir, work_dir, files)
def test_inactive_workspace(self):
    """Tests calling ScaleFileManager.download_files() with an inactive workspace"""
    workspace_1 = storage_test_utils.create_workspace()
    file_1 = storage_test_utils.create_file(workspace=workspace_1)
    local_path_1 = '/my/local/path/file.txt'
    file_2 = storage_test_utils.create_file(workspace=workspace_1)
    local_path_2 = '/another/local/path/file.txt'
    file_3 = storage_test_utils.create_file(workspace=workspace_1)
    local_path_3 = '/another/local/path/file.json'
    workspace_1.download_files = MagicMock()

    # Second workspace is archived, so downloads from it must be rejected
    workspace_2 = storage_test_utils.create_workspace()
    workspace_2.is_active = False
    workspace_2.save()
    file_4 = storage_test_utils.create_file(workspace=workspace_2)
    local_path_4 = '/my/local/path/4/file.txt'
    file_5 = storage_test_utils.create_file(workspace=workspace_2)
    local_path_5 = '/another/local/path/5/file.txt'

    files = [FileDownload(file_1, local_path_1), FileDownload(file_2, local_path_2),
             FileDownload(file_3, local_path_3), FileDownload(file_4, local_path_4),
             FileDownload(file_5, local_path_5)]
    self.assertRaises(ArchivedWorkspace, ScaleFile.objects.download_files, files)
def test_success(self, mock_exists, mock_workspaces):
    """Tests calling ScaleFileManager.cleanup_download_dir() successfully"""
    download_dir = os.path.join('download', 'dir')
    work_dir = os.path.join('work', 'dir')

    workspace_1 = storage_test_utils.create_workspace()
    workspace_1.cleanup_download_dir = MagicMock()
    workspace_1_work_dir = ScaleFile.objects._get_workspace_work_dir(work_dir, workspace_1)
    workspace_2 = storage_test_utils.create_workspace()
    workspace_2.cleanup_download_dir = MagicMock()
    workspace_2_work_dir = ScaleFile.objects._get_workspace_work_dir(work_dir, workspace_2)

    # Only workspace 1's work dir "exists"
    mock_exists.side_effect = lambda path: path == workspace_1_work_dir
    mock_workspaces.side_effect = lambda: [workspace_1, workspace_2]

    ScaleFile.objects.cleanup_download_dir(download_dir, work_dir)

    workspace_1.cleanup_download_dir.assert_called_once_with(download_dir, workspace_1_work_dir)
    # Workspace 2 should not be cleaned up because os.path.exists() returns false
    self.assertFalse(workspace_2.cleanup_download_dir.called)
def setUp(self):
    """Creates an active source workspace, a destination workspace, and an inactive one."""
    django.setup()
    self.workspace = storage_test_utils.create_workspace()
    self.new_workspace = storage_test_utils.create_workspace()
    self.inactive_workspace = storage_test_utils.create_workspace(is_active=False)
def setUp(self):
    """Creates a host-broker workspace and an S3-broker workspace whose
    credentials are expected to be masked in API responses."""
    django.setup()

    self.config = {
        'broker': {
            'type': 'host',
            'host_path': '/host/path',
        },
    }
    self.workspace = storage_test_utils.create_workspace(json_config=self.config)

    self.config2 = {
        "broker": {
            "type": "s3",
            "bucket_name": "my_bucket.domain.com",
            "credentials": {
                "access_key_id": "secret",
                "secret_access_key": "super-secret"
            },
            "host_path": "/my_bucket",
            "region_name": "us-east-1"
        }
    }
    # Expected API form: credentials replaced by mask characters
    self.secret_config = copy.deepcopy(self.config2)
    self.secret_config['broker']['credentials']['access_key_id'] = '************'
    self.secret_config['broker']['credentials']['secret_access_key'] = '************'
    self.workspace2 = storage_test_utils.create_workspace(json_config=self.config2)

    rest.login_client(self.client, is_staff=True)
def test_warnings(self):
    """Tests validating a new workspace where the broker type is changed."""
    json_config = {
        'broker': {
            'type': 'host',
            'host_path': '/host/path',
        },
    }
    storage_test_utils.create_workspace(name='ws-test', json_config=json_config)

    # Re-validate the same workspace name with a different broker type
    json_data = {
        'name': 'ws-test',
        'json_config': {
            'broker': {
                'type': 'nfs',
                'nfs_path': 'host:/dir',
            },
        },
    }

    url = rest_util.get_url('/workspaces/validation/')
    response = self.client.generic('POST', url, json.dumps(json_data), 'application/json')
    self.assertEqual(response.status_code, status.HTTP_200_OK, response.content)

    results = json.loads(response.content)
    self.assertEqual(len(results['warnings']), 1)
    self.assertEqual(results['warnings'][0]['id'], 'broker_type')
def test_deleted_file(self):
    """Tests calling ScaleFileManager.download_files() with a deleted file"""
    workspace_1 = storage_test_utils.create_workspace()
    file_1 = storage_test_utils.create_file(workspace=workspace_1)
    local_path_1 = '/my/local/path/file.txt'
    file_2 = storage_test_utils.create_file(workspace=workspace_1)
    local_path_2 = '/another/local/path/file.txt'
    # A deleted file in the batch must abort the whole download
    file_2.is_deleted = True
    file_2.save()
    file_3 = storage_test_utils.create_file(workspace=workspace_1)
    local_path_3 = '/another/local/path/file.json'
    workspace_1.download_files = MagicMock()

    workspace_2 = storage_test_utils.create_workspace()
    file_4 = storage_test_utils.create_file(workspace=workspace_2)
    local_path_4 = '/my/local/path/4/file.txt'
    file_5 = storage_test_utils.create_file(workspace=workspace_2)
    local_path_5 = '/another/local/path/5/file.txt'
    workspace_2.download_files = MagicMock()

    files = [FileDownload(file_1, local_path_1, False), FileDownload(file_2, local_path_2, False),
             FileDownload(file_3, local_path_3, False), FileDownload(file_4, local_path_4, False),
             FileDownload(file_5, local_path_5, False)]
    self.assertRaises(DeletedFile, ScaleFile.objects.download_files, files)
def test_success(self):
    """Tests calling ScaleFileManager.move_files() successfully"""
    workspace_1 = storage_test_utils.create_workspace()
    file_1 = storage_test_utils.create_file(file_name='my_file_1.txt', workspace=workspace_1)
    new_workspace_path_1 = os.path.join('my', 'new', 'path', '1', os.path.basename(file_1.file_path))
    file_2 = storage_test_utils.create_file(file_name='my_file_2.txt', workspace=workspace_1)
    new_workspace_path_2 = os.path.join('my', 'new', 'path', '2', os.path.basename(file_2.file_path))
    workspace_1.move_files = MagicMock()

    workspace_2 = storage_test_utils.create_workspace()
    file_3 = storage_test_utils.create_file(file_name='my_file_3.txt', workspace=workspace_2)
    new_workspace_path_3 = os.path.join('my', 'new', 'path', '3', os.path.basename(file_3.file_path))
    file_4 = storage_test_utils.create_file(file_name='my_file_4.txt', workspace=workspace_2)
    new_workspace_path_4 = os.path.join('my', 'new', 'path', '4', os.path.basename(file_4.file_path))
    workspace_2.move_files = MagicMock()

    files = [FileMove(file_1, new_workspace_path_1), FileMove(file_2, new_workspace_path_2),
             FileMove(file_3, new_workspace_path_3), FileMove(file_4, new_workspace_path_4)]
    ScaleFile.objects.move_files(files)

    # Moves must be dispatched per workspace, preserving order
    workspace_1.move_files.assert_called_once_with([FileMove(file_1, new_workspace_path_1),
                                                    FileMove(file_2, new_workspace_path_2)])
    workspace_2.move_files.assert_called_once_with([FileMove(file_3, new_workspace_path_3),
                                                    FileMove(file_4, new_workspace_path_4)])
def test_deleted_file(self):
    """Tests calling ScaleFileManager.download_files() with a deleted file"""
    download_dir = os.path.join('download', 'dir')
    work_dir = os.path.join('work', 'dir')

    workspace_1 = storage_test_utils.create_workspace()
    file_1 = storage_test_utils.create_file(workspace=workspace_1)
    local_path_1 = u'my/local/path/file.txt'
    file_2 = storage_test_utils.create_file(workspace=workspace_1)
    local_path_2 = u'another/local/path/file.txt'
    # Deleted file -> download_files should raise DeletedFile
    file_2.is_deleted = True
    file_2.save()
    file_3 = storage_test_utils.create_file(workspace=workspace_1)
    local_path_3 = u'another/local/path/file.json'
    workspace_1.download_files = MagicMock()
    workspace_1_work_dir = ScaleFile.objects._get_workspace_work_dir(work_dir, workspace_1)

    workspace_2 = storage_test_utils.create_workspace()
    file_4 = storage_test_utils.create_file(workspace=workspace_2)
    local_path_4 = u'my/local/path/4/file.txt'
    file_5 = storage_test_utils.create_file(workspace=workspace_2)
    local_path_5 = u'another/local/path/5/file.txt'
    workspace_2.download_files = MagicMock()
    workspace_2_work_dir = ScaleFile.objects._get_workspace_work_dir(work_dir, workspace_2)

    files = [(file_1, local_path_1), (file_2, local_path_2), (file_3, local_path_3),
             (file_4, local_path_4), (file_5, local_path_5)]
    self.assertRaises(DeletedFile, ScaleFile.objects.download_files, download_dir, work_dir, files)
def test_inactive_workspace(self):
    """Tests calling ScaleFileManager.move_files() with an inactive workspace"""
    workspace_1 = storage_test_utils.create_workspace()
    file_1 = storage_test_utils.create_file(file_name='my_file_1.txt', workspace=workspace_1)
    new_workspace_path_1 = os.path.join('my', 'new', 'path', '1', os.path.basename(file_1.file_path))
    file_2 = storage_test_utils.create_file(file_name='my_file_2.txt', workspace=workspace_1)
    new_workspace_path_2 = os.path.join('my', 'new', 'path', '2', os.path.basename(file_2.file_path))
    workspace_1.move_files = MagicMock()

    # Archived workspace: any move touching it must be rejected
    workspace_2 = storage_test_utils.create_workspace()
    workspace_2.is_active = False
    workspace_2.save()
    file_3 = storage_test_utils.create_file(file_name='my_file_3.txt', workspace=workspace_2)
    new_workspace_path_3 = os.path.join('my', 'new', 'path', '3', os.path.basename(file_3.file_path))
    file_4 = storage_test_utils.create_file(file_name='my_file_4.txt', workspace=workspace_2)
    new_workspace_path_4 = os.path.join('my', 'new', 'path', '4', os.path.basename(file_4.file_path))
    workspace_2.move_files = MagicMock()

    files = [FileMove(file_1, new_workspace_path_1), FileMove(file_2, new_workspace_path_2),
             FileMove(file_3, new_workspace_path_3), FileMove(file_4, new_workspace_path_4)]
    self.assertRaises(ArchivedWorkspace, ScaleFile.objects.move_files, files)
def test_successful(self):
    """Tests calling IngestTriggerRuleConfiguration.validate() successfully"""
    workspace_name = 'Test_Workspace'
    storage_utils.create_workspace(name=workspace_name)
    json_str = '{"condition": {"media_type": "text/plain", "data_types": ["A", "B"]}, "data": {"input_data_name": "my_input", "workspace_name": "%s"}}' % workspace_name

    # Should not raise since the referenced workspace exists
    rule = IngestTriggerRuleConfiguration(INGEST_TYPE, json.loads(json_str))
    rule.validate()
def test_successful(self):
    """Tests calling ParseTriggerRuleConfiguration.validate() successfully"""
    workspace_name = 'Test_Workspace'
    storage_utils.create_workspace(name=workspace_name)
    json_str = '{"condition": {"media_type": "text/plain", "data_types": ["A", "B"]}, "data": {"input_data_name": "my_input", "workspace_name": "%s"}}' % workspace_name

    # Should not raise since the referenced workspace exists
    rule = ParseTriggerRuleConfiguration(PARSE_TYPE, json.loads(json_str))
    rule.validate()
def test_success(self, mock_makedirs):
    """Tests calling ScaleFileManager.move_files() successfully.

    Verifies that moves are dispatched to each workspace with that
    workspace's work dir and (old_path, new_path) pairs, and that the
    ScaleFile models are updated with the new workspace paths.
    """
    work_dir = os.path.join('work', 'dir')

    # First workspace with two files to move
    workspace_1 = storage_test_utils.create_workspace()
    file_1 = storage_test_utils.create_file(file_name='my_file_1.txt', workspace=workspace_1)
    old_path_1 = file_1.file_path
    new_workspace_path_1 = os.path.join('my', 'new', 'path', '1', os.path.basename(file_1.file_path))
    file_2 = storage_test_utils.create_file(file_name='my_file_2.txt', workspace=workspace_1)
    old_path_2 = file_2.file_path
    new_workspace_path_2 = os.path.join('my', 'new', 'path', '2', os.path.basename(file_2.file_path))
    workspace_1.move_files = MagicMock()
    workspace_1_work_dir = ScaleFile.objects._get_workspace_work_dir(work_dir, workspace_1)

    # Second workspace with two more files
    workspace_2 = storage_test_utils.create_workspace()
    file_3 = storage_test_utils.create_file(file_name='my_file_3.txt', workspace=workspace_2)
    old_path_3 = file_3.file_path
    new_workspace_path_3 = os.path.join('my', 'new', 'path', '3', os.path.basename(file_3.file_path))
    file_4 = storage_test_utils.create_file(file_name='my_file_4.txt', workspace=workspace_2)
    old_path_4 = file_4.file_path
    new_workspace_path_4 = os.path.join('my', 'new', 'path', '4', os.path.basename(file_4.file_path))
    workspace_2.move_files = MagicMock()
    workspace_2_work_dir = ScaleFile.objects._get_workspace_work_dir(work_dir, workspace_2)

    files = [(file_1, new_workspace_path_1), (file_2, new_workspace_path_2),
             (file_3, new_workspace_path_3), (file_4, new_workspace_path_4)]
    ScaleFile.objects.move_files(work_dir, files)

    # Each workspace receives only its own files, as (old, new) path pairs
    workspace_1.move_files.assert_called_once_with(workspace_1_work_dir,
                                                   [(old_path_1, new_workspace_path_1),
                                                    (old_path_2, new_workspace_path_2)])
    workspace_2.move_files.assert_called_once_with(workspace_2_work_dir,
                                                   [(old_path_3, new_workspace_path_3),
                                                    (old_path_4, new_workspace_path_4)])

    # Check models for new workspace paths
    new_file_1 = ScaleFile.objects.get(id=file_1.id)
    self.assertEqual(new_file_1.file_path, new_workspace_path_1)
    new_file_2 = ScaleFile.objects.get(id=file_2.id)
    self.assertEqual(new_file_2.file_path, new_workspace_path_2)
    new_file_3 = ScaleFile.objects.get(id=file_3.id)
    self.assertEqual(new_file_3.file_path, new_workspace_path_3)
    new_file_4 = ScaleFile.objects.get(id=file_4.id)
    self.assertEqual(new_file_4.file_path, new_workspace_path_4)
def test_inactive_workspace(self):
    """Tests calling deleting files from an inactive workspace"""
    workspace_1 = storage_test_utils.create_workspace()
    workspace_1.download_files = MagicMock()
    file_1 = storage_test_utils.create_file(workspace=workspace_1)

    # Deleting from an archived workspace is not allowed
    workspace_2 = storage_test_utils.create_workspace(is_active=False)
    file_2 = storage_test_utils.create_file(workspace=workspace_2)

    self.assertRaises(ArchivedWorkspace, ScaleFile.objects.delete_files, [file_1, file_2])
def setUp(self):
    """Creates a completed job execution with products in two workspaces,
    plus a source file with a purge-results record."""
    django.setup()

    self.count = 1
    self.job_type = job_test_utils.create_seed_job_type()
    self.job = job_test_utils.create_job(job_type=self.job_type)
    self.job_exe = job_test_utils.create_job_exe(status='COMPLETED', job=self.job)

    self.wp1 = storage_test_utils.create_workspace()
    self.wp2 = storage_test_utils.create_workspace()
    self.prod1 = storage_test_utils.create_file(file_type='PRODUCT', workspace=self.wp1, job_exe=self.job_exe)
    self.prod2 = storage_test_utils.create_file(file_type='PRODUCT', workspace=self.wp1, job_exe=self.job_exe)
    self.prod3 = storage_test_utils.create_file(file_type='PRODUCT', workspace=self.wp2, job_exe=self.job_exe)

    self.file_1 = storage_test_utils.create_file(file_type='SOURCE')
    self.event = trigger_test_utils.create_trigger_event()
    PurgeResults.objects.create(source_file_id=self.file_1.id, trigger_event=self.event)
def test_url_base_url_missing(self):
    """Tests building a URL for a file in a workspace with no configured base URL."""
    ws = storage_test_utils.create_workspace(name='test')
    # Renamed from 'file' to avoid shadowing the builtin
    scale_file = storage_test_utils.create_file(file_name='test.txt', workspace=ws)

    # Without a base URL the file cannot produce a URL
    self.assertIsNone(scale_file.url)
def create_product(job_exe=None, workspace=None, has_been_published=False, is_published=False, uuid=None,
                   file_name='my_test_file.txt', file_path='/file/path/my_test_file.txt',
                   media_type='text/plain', file_size=100, countries=None, is_superseded=False,
                   superseded=None):
    """Creates a product file model for unit testing

    :returns: The product model
    :rtype: :class:`product.models.ProductFile`
    """
    if not job_exe:
        job_exe = job_utils.create_job_exe()
    if not workspace:
        workspace = storage_utils.create_workspace()
    if not uuid:
        # Derive a deterministic UUID from the job type and file name
        hasher = hashlib.md5()
        hasher.update(str(job_exe.job.job_type.id))
        hasher.update(file_name)
        uuid = hasher.hexdigest()
    if is_superseded and not superseded:
        superseded = timezone.now()

    product_file = ProductFile.objects.create(job_exe=job_exe, job=job_exe.job,
                                              job_type=job_exe.job.job_type,
                                              has_been_published=has_been_published,
                                              is_published=is_published, uuid=uuid,
                                              file_name=file_name, media_type=media_type,
                                              file_size=file_size, file_path=file_path,
                                              workspace=workspace, is_superseded=is_superseded,
                                              superseded=superseded)
    if countries:
        product_file.countries = countries
        product_file.save()
    return product_file
def create_product(job_exe=None, workspace=None, has_been_published=False, file_name='my_test_file.txt',
                   file_path='/file/path/my_test_file.txt', media_type='text/plain', file_size=100,
                   countries=None):
    """Creates a product file model for unit testing

    :returns: The product model
    :rtype: :class:`product.models.ProductFile`
    """
    if not job_exe:
        job_exe = job_utils.create_job_exe()
    if not workspace:
        workspace = storage_utils.create_workspace()

    product_file = ProductFile.objects.create(job_exe=job_exe, job=job_exe.job,
                                              job_type=job_exe.job.job_type,
                                              has_been_published=has_been_published,
                                              file_name=file_name, media_type=media_type,
                                              file_size=file_size, file_path=file_path,
                                              workspace=workspace)
    if countries:
        product_file.countries = countries
        product_file.save()
    return product_file
def test_success(self, mock_rmdir, mock_exists, mock_listdir):
    """Tests calling ScaleFileManager.cleanup_upload_dir() successfully"""
    upload_dir = os.path.join('upload', 'dir')
    work_dir = os.path.join('work', 'dir')

    workspace_1 = storage_test_utils.create_workspace()
    workspace_1.cleanup_upload_dir = MagicMock()
    workspace_1_work_dir = ScaleFile.objects._get_workspace_work_dir(work_dir, workspace_1)

    # The workspace work dir exists and is empty
    mock_exists.side_effect = lambda path: path == workspace_1_work_dir
    mock_listdir.side_effect = lambda path: []

    ScaleFile.objects.cleanup_upload_dir(upload_dir, work_dir, workspace_1)

    workspace_1.cleanup_upload_dir.assert_called_once_with(upload_dir, workspace_1_work_dir)
def create_ingest(file_name='test.txt', status='TRANSFERRING', transfer_started=None, transfer_ended=None,
                  ingest_started=None, ingest_ended=None, data_started=None, data_ended=None,
                  workspace=None, strike=None, source_file=None):
    """Creates an ingest model for unit testing, defaulting any argument
    that is not supplied.

    :returns: The ingest model
    :rtype: :class:`ingest.models.Ingest`
    """
    if not workspace:
        workspace = storage_test_utils.create_workspace()
    if not strike:
        strike = create_strike()
    if not source_file:
        source_file = source_test_utils.create_source(file_name=file_name, data_started=data_started,
                                                      data_ended=data_ended, workspace=workspace)
    if not transfer_started:
        transfer_started = timezone.now()
    # Timestamps are only populated once the ingest has progressed past the
    # corresponding status
    if status not in ['QUEUED', 'TRANSFERRING'] and not ingest_started:
        ingest_started = timezone.now()
    if status not in ['QUEUED', 'TRANSFERRING', 'INGESTING'] and not ingest_ended:
        ingest_ended = timezone.now()

    # Was a bare except, which also swallowed KeyboardInterrupt/SystemExit;
    # narrowed to Exception while keeping the fallback behavior
    try:
        job_type = Ingest.objects.get_ingest_job_type()
    except Exception:
        job_type = job_utils.create_job_type()
    job = job_utils.create_job(job_type=job_type)
    job_utils.create_job_exe(job=job)

    return Ingest.objects.create(file_name=file_name, file_size=source_file.file_size, status=status,
                                 job=job, bytes_transferred=source_file.file_size,
                                 transfer_started=transfer_started, transfer_ended=transfer_ended,
                                 media_type='text/plain', ingest_started=ingest_started,
                                 ingest_ended=ingest_ended, workspace=workspace, strike=strike,
                                 source_file=source_file)
def create_source(file_name='my_test_file.txt', file_size=100, media_type='text/plain',
                  file_path='/file/path/my_test_file.txt', data_started=None, data_ended=None,
                  is_parsed=True, parsed=None, workspace=None, countries=None, data_type_tags=None):
    """Creates a source file model for unit testing

    :returns: The source file model
    :rtype: :class:`storage.models.ScaleFile`
    """
    # Fixed mutable default argument: the old `data_type_tags=[]` shared one
    # list across all calls, so a caller mutating it leaked into later calls
    if data_type_tags is None:
        data_type_tags = []
    if not data_started:
        data_started = timezone.now()
    if not data_ended:
        data_ended = data_started
    if not parsed and is_parsed:
        parsed = timezone.now()
    if not workspace:
        workspace = storage_utils.create_workspace()

    source_file = ScaleFile.objects.create(file_name=file_name, file_type='SOURCE', media_type=media_type,
                                           file_size=file_size, file_path=file_path,
                                           data_started=data_started, data_ended=data_ended,
                                           is_parsed=is_parsed, parsed=parsed,
                                           data_type_tags=data_type_tags, workspace=workspace,
                                           uuid=hashlib.md5(file_name).hexdigest())
    if countries:
        source_file.countries = countries
        source_file.save()
    return source_file
def setUp(self):
    """Builds a strike processor configured to ingest .txt files into a test workspace."""
    django.setup()

    self.ingest = ingest_test_utils.create_ingest(file_name='my_file.txt')
    self.mount = 'host:/path'
    self.mount_on = os.path.join('my', 'test')
    self.workspace = storage_test_utils.create_workspace()
    self.config = StrikeConfiguration({
        'version': '1.0',
        'mount': self.mount,
        'transfer_suffix': '_tmp',
        'files_to_ingest': [{
            'filename_regex': '.*txt',
            'workspace_path': 'foo',
            'workspace_name': self.workspace.name,
        }],
    })
    self.job_exe = job_test_utils.create_job_exe()
    self.strike_proc = StrikeProcessor(1, self.job_exe.id, self.config)
    self.strike_dir = SCALE_INGEST_MOUNT_PATH
def create_trigger_rule(name=None, trigger_type='PARSE', configuration=None, is_active=True):
    """Creates a trigger rule model for unit testing

    :returns: The trigger rule model
    :rtype: :class:`trigger.models.TriggerRule`
    """
    if not name:
        # Generate a unique rule name from the module-level counter
        global RULE_NAME_COUNTER
        name = 'test-name-%i' % RULE_NAME_COUNTER
        RULE_NAME_COUNTER = RULE_NAME_COUNTER + 1
    if not configuration:
        configuration = {
            'version': '1.0',
            'condition': {
                'media_type': 'text/plain',
            },
            'data': {
                'input_data_name': 'input_file',
                'workspace_name': storage_test_utils.create_workspace().name,
            }
        }

    return TriggerRule.objects.create(name=name, type=trigger_type, configuration=configuration,
                                      is_active=is_active)
def test_url_file_slash(self):
    """Tests building a URL for a file where the file path URL has a leading slash."""
    ws = storage_test_utils.create_workspace(name='test', base_url='http://localhost')
    # Renamed from 'file' to avoid shadowing the builtin
    scale_file = storage_test_utils.create_file(file_name='test.txt',
                                                file_path='/file/path/test.txt', workspace=ws)

    # The leading slash must not produce a double slash in the URL
    self.assertEqual(scale_file.url, 'http://localhost/file/path/test.txt')
def create_ingest(file_name='test.txt', status='TRANSFERRING', transfer_started=None, transfer_ended=None,
                  ingest_started=None, ingest_ended=None, data_started=None, data_ended=None,
                  workspace=None, strike=None, scan=None, source_file=None):
    """Creates an ingest model for unit testing, defaulting any argument
    that is not supplied.

    :returns: The ingest model
    :rtype: :class:`ingest.models.Ingest`
    """
    if not workspace:
        workspace = storage_test_utils.create_workspace()
    if not source_file:
        source_file = source_test_utils.create_source(file_name=file_name, data_started=data_started,
                                                      data_ended=data_ended, workspace=workspace)
    if not transfer_started:
        transfer_started = timezone.now()
    # Timestamps are only populated once the ingest has progressed past the
    # corresponding status
    if status not in ['QUEUED', 'TRANSFERRING'] and not ingest_started:
        ingest_started = timezone.now()
    if status not in ['QUEUED', 'TRANSFERRING', 'INGESTING'] and not ingest_ended:
        ingest_ended = timezone.now()

    # Was a bare except, which also swallowed KeyboardInterrupt/SystemExit;
    # narrowed to Exception while keeping the fallback behavior
    try:
        job_type = Ingest.objects.get_ingest_job_type()
    except Exception:
        job_type = job_utils.create_job_type()
    job = job_utils.create_job(job_type=job_type)
    job_utils.create_job_exe(job=job)

    return Ingest.objects.create(file_name=file_name, file_size=source_file.file_size, status=status,
                                 job=job, bytes_transferred=source_file.file_size,
                                 transfer_started=transfer_started, transfer_ended=transfer_ended,
                                 media_type='text/plain', ingest_started=ingest_started,
                                 ingest_ended=ingest_ended, data_started=source_file.data_started,
                                 data_ended=source_file.data_ended, workspace=workspace, strike=strike,
                                 scan=scan, source_file=source_file)
def setUp(self):
    """Creates a mocked workspace, a source file, and operational and
    non-operational job executions with their upload file lists."""
    django.setup()

    self.workspace = storage_test_utils.create_workspace()
    self.workspace.upload_files = MagicMock()
    self.workspace.delete_files = MagicMock()
    self.upload_dir = os.path.join('upload', 'dir')
    self.work_dir = os.path.join('work', 'dir')
    self.workspace_work_dir = ScaleFile.objects._get_workspace_work_dir(self.work_dir, self.workspace)

    self.source_file = source_test_utils.create_source(file_name=u'input1.txt', workspace=self.workspace)

    self.job_exe = job_test_utils.create_job_exe()
    self.job_exe_no = job_test_utils.create_job_exe()
    # Flag the second execution's job and job type as non-operational
    with transaction.atomic():
        self.job_exe_no.job.is_operational = False
        self.job_exe_no.job.job_type.is_operational = False
        self.job_exe_no.job.save()
        self.job_exe_no.job.job_type.save()

    self.files = [
        (u'local/1/file.txt', u'remote/1/file.txt', None),
        (u'local/2/file.json', u'remote/2/file.json', u'application/x-custom-json'),
    ]
    self.files_no = [
        (u'local/3/file.h5', u'remote/3/file.h5', u'image/x-hdf5-image'),
    ]
def test_success(self):
    """Tests deleting files successfully"""
    workspace_1 = storage_test_utils.create_workspace()
    workspace_1.delete_files = MagicMock()
    file_1 = storage_test_utils.create_file(workspace=workspace_1)

    workspace_2 = storage_test_utils.create_workspace()
    workspace_2.delete_files = MagicMock()
    file_2 = storage_test_utils.create_file(workspace=workspace_2)

    ScaleFile.objects.delete_files([file_1, file_2])

    # Each workspace is asked to delete only its own file
    workspace_1.delete_files.assert_called_once_with([file_1])
    workspace_2.delete_files.assert_called_once_with([file_2])
def test_success(self, mock_getsize):
    """Tests calling ScaleFileManager.upload_files() successfully"""
    mock_getsize.side_effect = lambda path: 100

    workspace = storage_test_utils.create_workspace()

    file_1 = ScaleFile()
    file_1.set_basic_fields('file.txt', 100, None)  # Scale should auto-detect text/plain
    remote_path_1 = 'my/remote/path/file.txt'
    local_path_1 = 'my/local/path/file.txt'
    file_1.file_path = remote_path_1

    file_2 = ScaleFile()
    file_2.set_basic_fields('file.json', 100, 'application/json')
    remote_path_2 = 'my/remote/path/2/file.json'
    local_path_2 = 'my/local/path/2/file.json'
    file_2.file_path = remote_path_2

    workspace.upload_files = MagicMock()

    uploads = [FileUpload(file_1, local_path_1), FileUpload(file_2, local_path_2)]
    models = ScaleFile.objects.upload_files(workspace, uploads)

    workspace.upload_files.assert_called_once_with([FileUpload(file_1, local_path_1),
                                                    FileUpload(file_2, local_path_2)])

    # Returned models must carry the names, paths, media types and workspace
    self.assertEqual('file.txt', models[0].file_name)
    self.assertEqual(remote_path_1, models[0].file_path)
    self.assertEqual('text/plain', models[0].media_type)
    self.assertEqual(workspace.id, models[0].workspace_id)
    self.assertEqual('file.json', models[1].file_name)
    self.assertEqual(remote_path_2, models[1].file_path)
    self.assertEqual('application/json', models[1].media_type)
    self.assertEqual(workspace.id, models[1].workspace_id)
def test_fails(self, mock_makedirs, mock_getsize):
    """Tests calling ScaleFileManager.upload_files() when Workspace.upload_files() fails"""
    mock_getsize.side_effect = lambda path: 100

    upload_dir = os.path.join('upload', 'dir')
    work_dir = os.path.join('work', 'dir')
    workspace = storage_test_utils.create_workspace()

    file_1 = ScaleFile()
    file_1.media_type = None  # Scale should auto-detect text/plain
    remote_path_1 = 'my/remote/path/file.txt'
    local_path_1 = 'my/local/path/file.txt'
    file_2 = ScaleFile()
    file_2.media_type = 'application/json'
    remote_path_2 = 'my/remote/path/2/file.json'
    local_path_2 = 'my/local/path/2/file.json'

    # Force the workspace upload to blow up
    workspace.upload_files = MagicMock()
    workspace.upload_files.side_effect = Exception
    workspace.delete_files = MagicMock()
    delete_work_dir = os.path.join(work_dir, 'delete', get_valid_filename(workspace.name))

    files = [(file_1, local_path_1, remote_path_1), (file_2, local_path_2, remote_path_2)]
    self.assertRaises(Exception, ScaleFile.objects.upload_files, upload_dir, work_dir, workspace, files)
def create_scan(name=None, title=None, description=None, configuration=None):
    """Creates a Scan model for unit testing, filling in defaults for any omitted field."""

    global NAME_COUNTER
    if not name:
        # Generate a unique name from the module-level counter
        name = 'test-scan-%i' % NAME_COUNTER
        NAME_COUNTER += 1
    if not title:
        title = 'Test Scan'
    if not description:
        description = 'Test description'
    if not configuration:
        # Default: recursively scan a fresh workspace directory, ingesting every file
        workspace = storage_test_utils.create_workspace()
        configuration = {
            'version': '1.0',
            'workspace': workspace.name,
            'scanner': {'type': 'dir'},
            'recursive': True,
            'files_to_ingest': [{'filename_regex': '.*'}],
        }

    return Scan.objects.create(name=name, title=title, description=description, configuration=configuration)
def test_success_new(self, mock_mkdir, mock_getsize, mock_execute):
    """Tests calling SourceFileManager.store_file() successfully with a new source file"""

    # Every file reports a size of 100 bytes
    mock_getsize.side_effect = lambda path: 100

    work_dir = 'work'
    workspace = storage_utils.create_workspace()
    remote_path = u'my/remote/path/file.txt'
    local_path = u'my/local/path/file.txt'

    # Stub out all workspace file operations
    workspace.cleanup_upload_dir = MagicMock()
    workspace.upload_files = MagicMock()
    workspace.setup_upload_dir = MagicMock()
    workspace.delete_files = MagicMock()

    # Directories the manager is expected to derive from work_dir
    expected_upload_dir = os.path.join(work_dir, 'upload')
    expected_work_dir = os.path.join(work_dir, 'work', 'workspaces', get_valid_filename(workspace.name))

    src_file = SourceFile.objects.store_file(work_dir, local_path, [], workspace, remote_path)

    workspace.upload_files.assert_called_once_with(expected_upload_dir, expected_work_dir,
                                                   [('file.txt', remote_path)])
    # Nothing should have been deleted on the success path
    self.assertListEqual(workspace.delete_files.call_args_list, [])

    self.assertEqual(u'file.txt', src_file.file_name)
    self.assertEqual(u'3d8e577bddb17db339eae0b3d9bcf180', src_file.uuid)
    self.assertEqual(remote_path, src_file.file_path)
    self.assertEqual(u'text/plain', src_file.media_type)
    self.assertEqual(workspace.id, src_file.workspace_id)
def test_successful(self):
    """Tests calling the queue recipe view successfully."""

    recipe_type = recipe_test_utils.create_recipe_type()
    workspace = storage_test_utils.create_workspace()

    # Queue a recipe with no inputs against the new workspace
    json_data = {
        'recipe_type_id': recipe_type.id,
        'recipe_data': {
            'version': '1.0',
            'input_data': [],
            'workspace_id': workspace.id,
        },
    }

    url = rest_util.get_url('/queue/new-recipe/')
    response = self.client.generic('POST', url, json.dumps(json_data), 'application/json')
    self.assertEqual(response.status_code, status.HTTP_201_CREATED, response.content)

    result = json.loads(response.content)
    self.assertTrue(response['Location'])
    self.assertEqual(result['recipe_type']['id'], recipe_type.id)
def setUp(self):
    django.setup()

    # Job interface with a single PNG file input and a single PNG file output
    input_def = {'media_types': ['image/png'], 'type': 'file', 'name': 'input_file'}
    output_def = {'name': 'output_file', 'type': 'file', 'media_type': 'image/png'}
    self.interface = {
        'version': '1.1',
        'command': 'test_cmd',
        'command_arguments': 'test_arg',
        'input_data': [input_def],
        'output_data': [output_def],
        'shared_resources': [],
    }
    self.job_type = job_test_utils.create_job_type(interface=self.interface)

    self.workspace = storage_test_utils.create_workspace()
    self.file1 = storage_test_utils.create_file(workspace=self.workspace)
def create_source(file_name='my_test_file.txt', file_size=100, media_type='text/plain',
                  file_path='/file/path/my_test_file.txt', data_started=None, data_ended=None,
                  is_parsed=True, parsed=None, workspace=None, countries=None):
    """Creates a source file model for unit testing

    :param file_name: The file name (also used to derive the UUID)
    :param file_size: The file size in bytes
    :param media_type: The file media type
    :param file_path: The relative storage path of the file
    :param data_started: When the file's data started (defaults to now)
    :param data_ended: When the file's data ended (defaults to data_started)
    :param is_parsed: Whether the file has been parsed
    :param parsed: When the file was parsed (defaults to now when is_parsed)
    :param workspace: The storage workspace (a new one is created if omitted)
    :param countries: Optional country models to associate with the file
    :returns: The source file model
    :rtype: :class:`source.models.SourceFile`
    """

    if not data_started:
        data_started = timezone.now()
    if not data_ended:
        data_ended = data_started
    if not parsed and is_parsed:
        parsed = timezone.now()
    if not workspace:
        workspace = storage_utils.create_workspace()

    # md5() requires bytes: encode the file name so unicode names work under
    # Python 3 as well (ASCII names behave identically under Python 2)
    source_file = SourceFile.objects.create(file_name=file_name, media_type=media_type, file_size=file_size,
                                            file_path=file_path, data_started=data_started,
                                            data_ended=data_ended, is_parsed=is_parsed, parsed=parsed,
                                            workspace=workspace,
                                            uuid=hashlib.md5(file_name.encode('utf-8')).hexdigest())
    if countries:
        source_file.countries = countries
        source_file.save()
    return source_file
def test_deleted_file(self):
    """Tests attempting to delete a file that is already deleted"""

    workspace_a = storage_test_utils.create_workspace()
    workspace_a.delete_files = MagicMock()
    live_file = storage_test_utils.create_file(workspace=workspace_a)
    already_deleted = storage_test_utils.create_file(workspace=workspace_a, is_deleted=True)

    workspace_b = storage_test_utils.create_workspace()
    workspace_b.delete_files = MagicMock()
    other_file = storage_test_utils.create_file(workspace=workspace_b)

    ScaleFile.objects.delete_files([live_file, already_deleted, other_file])

    # Already-deleted files are still passed through to their workspace
    workspace_a.delete_files.assert_called_once_with([live_file, already_deleted])
    workspace_b.delete_files.assert_called_once_with([other_file])
def setUp(self):
    django.setup()

    self.workspace = storage_test_utils.create_workspace()

    # Two recipe types that only differ by name and description
    shared = dict(version='1.0', definition='')
    self.recipe_type_1 = RecipeType.objects.create(name='Recipe 1',
                                                   description='Description of Recipe 1', **shared)
    self.recipe_type_2 = RecipeType.objects.create(name='Recipe 2',
                                                   description='Description of Recipe 2', **shared)
def setUp(self):
    """Builds a two-job recipe type (Job 1 output feeds Job 2 input) plus the data to queue it."""
    django.setup()

    # A workspace and source file to serve as the recipe's input
    workspace = storage_test_utils.create_workspace()
    source_file = source_test_utils.create_source(workspace=workspace)
    self.event = trigger_test_utils.create_trigger_event()

    # Job type 1: consumes one plain-text file, emits multiple PNG files
    interface_1 = {
        "version": "1.0",
        "command": "test_command",
        "command_arguments": "test_arg",
        "input_data": [{"name": "Test Input 1", "type": "file", "media_types": ["text/plain"]}],
        "output_data": [{"name": "Test Output 1", "type": "files", "media_type": "image/png"}],
    }
    self.job_type_1 = job_test_utils.create_job_type(interface=interface_1)

    # Job type 2: consumes multiple image files, emits a single file
    interface_2 = {
        "version": "1.0",
        "command": "test_command",
        "command_arguments": "test_arg",
        "input_data": [{"name": "Test Input 2", "type": "files", "media_types": ["image/png", "image/tiff"]}],
        "output_data": [{"name": "Test Output 2", "type": "file"}],
    }
    self.job_type_2 = job_test_utils.create_job_type(interface=interface_2)

    # Recipe definition: the recipe input feeds Job 1, and Job 1's output
    # files are connected into Job 2's input
    definition = {
        "version": "1.0",
        "input_data": [{"name": "Recipe Input", "type": "file", "media_types": ["text/plain"]}],
        "jobs": [
            {
                "name": "Job 1",
                "job_type": {"name": self.job_type_1.name, "version": self.job_type_1.version},
                "recipe_inputs": [{"recipe_input": "Recipe Input", "job_input": "Test Input 1"}],
            },
            {
                "name": "Job 2",
                "job_type": {"name": self.job_type_2.name, "version": self.job_type_2.version},
                "dependencies": [
                    {"name": "Job 1", "connections": [{"output": "Test Output 1", "input": "Test Input 2"}]}
                ],
            },
        ],
    }
    # Sanity-check that the job interfaces wire together before creating the type
    recipe_definition = RecipeDefinition(definition)
    recipe_definition.validate_job_interfaces()
    self.recipe_type = recipe_test_utils.create_recipe_type(definition=definition)

    # Recipe data payload the tests submit when queuing the recipe
    self.data = {
        "version": "1.0",
        "input_data": [{"name": "Recipe Input", "file_id": source_file.id}],
        "workspace_id": workspace.id,
    }

    # Register a fake processor
    self.mock_processor = MagicMock(QueueEventProcessor)
    Queue.objects.register_processor(lambda: self.mock_processor)
def test_success(self):
    """Tests calling ScaleFileManager.move_files() successfully"""

    work_dir = os.path.join('work', 'dir')

    # Workspace 1 holds files 1 and 2
    workspace_1 = storage_test_utils.create_workspace()
    file_1 = storage_test_utils.create_file(file_name='my_file_1.txt', workspace=workspace_1)
    file_2 = storage_test_utils.create_file(file_name='my_file_2.txt', workspace=workspace_1)
    old_path_1 = file_1.file_path
    old_path_2 = file_2.file_path
    new_path_1 = os.path.join('my', 'new', 'path', '1', os.path.basename(file_1.file_path))
    new_path_2 = os.path.join('my', 'new', 'path', '2', os.path.basename(file_2.file_path))
    workspace_1.move_files = MagicMock()
    work_dir_1 = ScaleFile.objects._get_workspace_work_dir(work_dir, workspace_1)

    # Workspace 2 holds files 3 and 4
    workspace_2 = storage_test_utils.create_workspace()
    file_3 = storage_test_utils.create_file(file_name='my_file_3.txt', workspace=workspace_2)
    file_4 = storage_test_utils.create_file(file_name='my_file_4.txt', workspace=workspace_2)
    old_path_3 = file_3.file_path
    old_path_4 = file_4.file_path
    new_path_3 = os.path.join('my', 'new', 'path', '3', os.path.basename(file_3.file_path))
    new_path_4 = os.path.join('my', 'new', 'path', '4', os.path.basename(file_4.file_path))
    workspace_2.move_files = MagicMock()
    work_dir_2 = ScaleFile.objects._get_workspace_work_dir(work_dir, workspace_2)

    moves = [(file_1, new_path_1), (file_2, new_path_2), (file_3, new_path_3), (file_4, new_path_4)]
    ScaleFile.objects.move_files(work_dir, moves)

    # Each workspace receives only its own files, paired as (old path, new path)
    workspace_1.move_files.assert_called_once_with(work_dir_1, [(old_path_1, new_path_1),
                                                               (old_path_2, new_path_2)])
    workspace_2.move_files.assert_called_once_with(work_dir_2, [(old_path_3, new_path_3),
                                                               (old_path_4, new_path_4)])

    # The models should have been saved with the new workspace paths
    for moved_file, expected_path in [(file_1, new_path_1), (file_2, new_path_2),
                                      (file_3, new_path_3), (file_4, new_path_4)]:
        self.assertEqual(ScaleFile.objects.get(id=moved_file.id).file_path, expected_path)
def setUp(self):
    django.setup()

    # Minimal host-broker configuration backing the test workspace
    self.config = {'broker': {'type': 'host', 'host_path': '/host/path'}}
    self.workspace = storage_test_utils.create_workspace(json_config=self.config)
def test_success(self):
    """Tests calling ScaleFileManager.download_files() successfully"""

    download_dir = os.path.join('download', 'dir')
    work_dir = os.path.join('work', 'dir')

    # Workspace 1 holds three files
    workspace_1 = storage_test_utils.create_workspace()
    file_1 = storage_test_utils.create_file(workspace=workspace_1)
    local_path_1 = u'my/local/path/file.txt'
    file_2 = storage_test_utils.create_file(workspace=workspace_1)
    local_path_2 = u'another/local/path/file.txt'
    file_3 = storage_test_utils.create_file(workspace=workspace_1)
    local_path_3 = u'another/local/path/file.json'
    workspace_1.setup_download_dir = MagicMock()
    workspace_1.download_files = MagicMock()
    work_dir_1 = ScaleFile.objects._get_workspace_work_dir(work_dir, workspace_1)

    # Workspace 2 holds two files
    workspace_2 = storage_test_utils.create_workspace()
    file_4 = storage_test_utils.create_file(workspace=workspace_2)
    local_path_4 = u'my/local/path/4/file.txt'
    file_5 = storage_test_utils.create_file(workspace=workspace_2)
    local_path_5 = u'another/local/path/5/file.txt'
    workspace_2.setup_download_dir = MagicMock()
    workspace_2.download_files = MagicMock()
    work_dir_2 = ScaleFile.objects._get_workspace_work_dir(work_dir, workspace_2)

    downloads = [(file_1, local_path_1), (file_2, local_path_2), (file_3, local_path_3),
                 (file_4, local_path_4), (file_5, local_path_5)]
    ScaleFile.objects.download_files(download_dir, work_dir, downloads)

    # Each workspace is set up once and asked to download only its own files
    workspace_1.setup_download_dir.assert_called_once_with(download_dir, work_dir_1)
    workspace_1.download_files.assert_called_once_with(download_dir, work_dir_1,
                                                       [(file_1.file_path, local_path_1),
                                                        (file_2.file_path, local_path_2),
                                                        (file_3.file_path, local_path_3)])
    workspace_2.setup_download_dir.assert_called_once_with(download_dir, work_dir_2)
    workspace_2.download_files.assert_called_once_with(download_dir, work_dir_2,
                                                       [(file_4.file_path, local_path_4),
                                                        (file_5.file_path, local_path_5)])
def test_broker_validation_error(self):
    """Tests that attempting to get a bad broker instance raises an error."""

    # 'host' brokers require a 'host_path' setting; it is deliberately omitted
    # here to make the configuration invalid
    config = {
        'version': '1.0',
        'broker': {
            'type': 'host',
        },
    }
    workspace = storage_test_utils.create_workspace(json_config=config)

    # The invalid configuration should raise when the broker is constructed
    self.assertRaises(InvalidBrokerConfiguration, workspace.get_broker)
def test_success(self):
    """Tests calling ScaleFileManager.setup_upload_dir() successfully"""

    upload_dir = os.path.join('upload', 'dir')
    work_dir = os.path.join('work', 'dir')

    workspace = storage_test_utils.create_workspace()
    workspace.setup_upload_dir = MagicMock()
    expected_work_dir = ScaleFile.objects._get_workspace_work_dir(work_dir, workspace)

    ScaleFile.objects.setup_upload_dir(upload_dir, work_dir, workspace)

    # The workspace should be set up with its own derived work directory
    workspace.setup_upload_dir.assert_called_once_with(upload_dir, expected_work_dir)
def test_inactive_workspace(self):
    """Tests calling ScaleFileManager.move_files() with an inactive workspace"""

    # Active workspace holding files 1 and 2
    active_workspace = storage_test_utils.create_workspace()
    file_1 = storage_test_utils.create_file(file_name='my_file_1.txt', workspace=active_workspace)
    file_2 = storage_test_utils.create_file(file_name='my_file_2.txt', workspace=active_workspace)
    active_workspace.move_files = MagicMock()

    # Archived (inactive) workspace holding files 3 and 4
    inactive_workspace = storage_test_utils.create_workspace()
    inactive_workspace.is_active = False
    inactive_workspace.save()
    file_3 = storage_test_utils.create_file(file_name='my_file_3.txt', workspace=inactive_workspace)
    file_4 = storage_test_utils.create_file(file_name='my_file_4.txt', workspace=inactive_workspace)
    inactive_workspace.move_files = MagicMock()

    moves = []
    for index, file_model in enumerate([file_1, file_2, file_3, file_4], 1):
        new_path = os.path.join('my', 'new', 'path', str(index), os.path.basename(file_model.file_path))
        moves.append(FileMove(file_model, new_path))

    # Moving files that live in an archived workspace is not allowed
    self.assertRaises(ArchivedWorkspace, ScaleFile.objects.move_files, moves)
def test_broker_validation(self):
    """Tests that getting the broker instance performs validation."""

    # A complete, valid host-broker configuration
    config = {
        'version': '1.0',
        'broker': {
            'type': 'host',
            'host_path': '/host/path',
        },
    }
    workspace = storage_test_utils.create_workspace(json_config=config)

    # No exceptions indicates success
    workspace.get_broker()
def setUp(self):
    django.setup()

    # Job interface with a single PNG file input and a single PNG file output
    input_def = {"media_types": ["image/png"], "type": "file", "name": "input_file"}
    output_def = {"name": "output_file", "type": "file", "media_type": "image/png"}
    self.interface = {
        "version": "1.1",
        "command": "test_cmd",
        "command_arguments": "test_arg",
        "input_data": [input_def],
        "output_data": [output_def],
        "shared_resources": [],
    }
    self.job_type = job_test_utils.create_job_type(interface=self.interface)

    self.workspace = storage_test_utils.create_workspace()
    self.file1 = storage_test_utils.create_file(workspace=self.workspace)