def test_add_to_validation_sqs__adds_correct_event_to_queue(self):
    uploaded_file = UploadedFile.create(upload_area=self.upload_area,
                                        name="file2",
                                        content_type="application/octet-stream; dcp-type=data",
                                        data="file2_content")
    validation_scheduler = ValidationScheduler(self.upload_area_id, [uploaded_file])

    validation_uuid = validation_scheduler.add_to_validation_sqs(["filename123"],
                                                                 "test_docker_image",
                                                                 {"variable": "variable"},
                                                                 "123456")

    # The queued message should carry exactly the validation request we scheduled.
    message = self.sqs.meta.client.receive_message(QueueUrl='test_validation_q_url')
    message_body = json.loads(message['Messages'][0]['Body'])
    self.assertEqual(message_body["filenames"], ["filename123"])
    self.assertEqual(message_body["validation_id"], validation_uuid)
    self.assertEqual(message_body["validator_docker_image"], "test_docker_image")
    self.assertEqual(message_body["environment"], {"variable": "variable"})
    self.assertEqual(message_body["orig_validation_id"], "123456")
    self.assertEqual(message_body["upload_area_uuid"], uploaded_file.upload_area.uuid)

    # ...and the validation DB record should be marked as queued for scheduling.
    record = UploadDB().get_pg_record("validation", validation_uuid, column='id')
    self.assertEqual(record["status"], "SCHEDULING_QUEUED")
def test_check_files_can_be_validated__if_file_is_not_too_large__returns_true(self):
    # "file2_content" is tiny, well under the size limit, so the file is validatable.
    # (The test name previously read "too_large__returns_true", which contradicted
    # both the fixture and the assertion below.)
    uploaded_file = UploadedFile.create(upload_area=self.upload_area,
                                        name="file2",
                                        content_type="application/octet-stream; dcp-type=data",
                                        data="file2_content")
    scheduler = ValidationScheduler(self.upload_area_id, [uploaded_file])

    file_validatable = scheduler.check_files_can_be_validated()

    self.assertEqual(True, file_validatable)
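# Hedged companion sketch (not in the original suite): the inverse case, where an
# oversized file should make check_files_can_be_validated() return False. It assumes
# the scheduler compares file sizes against a patchable module-level limit; the patch
# target 'upload.common.validation_scheduler.MAX_FILE_SIZE_IN_BYTES' is an illustrative
# guess, not a confirmed name, and `patch` is unittest.mock.patch, assumed imported at
# the top of the file.
@patch('upload.common.validation_scheduler.MAX_FILE_SIZE_IN_BYTES', 1)
def test_check_files_can_be_validated__if_file_is_too_large__returns_false(self):
    # 13 bytes of content against the patched 1-byte limit.
    uploaded_file = UploadedFile.create(upload_area=self.upload_area,
                                        name="file2",
                                        content_type="application/octet-stream; dcp-type=data",
                                        data="file2_content")
    scheduler = ValidationScheduler(self.upload_area_id, [uploaded_file])

    file_validatable = scheduler.check_files_can_be_validated()

    self.assertEqual(False, file_validatable)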
def test__create_validation_event__creates_event_with_correct_status(self):
    uploaded_file = UploadedFile.create(upload_area=self.upload_area,
                                        name="file2#",
                                        content_type="application/octet-stream; dcp-type=data",
                                        data="file2_content")
    scheduler = ValidationScheduler(self.upload_area_id, [uploaded_file])
    validation_id = str(uuid.uuid4())

    validation_event = scheduler._create_validation_event("test_docker_image", validation_id, None)

    self.assertEqual(validation_event.docker_image, "test_docker_image")
    self.assertEqual(validation_event.status, "SCHEDULING_QUEUED")
def test__update_validation_event__updates_event_status(self):
    uploaded_file = UploadedFile.create(upload_area=self.upload_area,
                                        name="file2#",
                                        content_type="application/octet-stream; dcp-type=data",
                                        data="file2_content")
    scheduler = ValidationScheduler(self.upload_area_id, [uploaded_file])
    scheduler.batch_job_id = "123456"
    validation_id = str(uuid.uuid4())

    # A freshly created event has no batch job attached yet.
    validation_event = scheduler._create_validation_event("test_docker_image", validation_id, None)
    self.assertEqual(validation_event.job_id, None)

    # Updating the event picks up the scheduler's batch job id and moves it to SCHEDULED.
    validation_event = scheduler._update_validation_event("test_docker_image", validation_id, None)
    self.assertEqual(validation_event.job_id, "123456")
    self.assertEqual(validation_event.status, "SCHEDULED")
def test_create__creates_a_new_s3_object_and_db_record(self):
    filename = f"file-{random.randint(0, 999999999)}"
    content_type = "application/octet-stream; dcp-type=data"
    file_content = "file1_content"

    uf = UploadedFile.create(upload_area=self.upload_area,
                             name=filename,
                             content_type=content_type,
                             data=file_content)

    self.assertIsInstance(uf, UploadedFile)

    # S3 Object
    s3_key = f"{self.upload_area_id}/{filename}"
    s3object = self.upload_bucket.Object(s3_key)
    self.assertEqual(content_type, s3object.content_type)
    self.assertEqual(file_content.encode('utf8'), s3object.get()['Body'].read())

    # DB Record
    record = self.db.query(DbFile).filter(DbFile.s3_key == s3_key,
                                          DbFile.s3_etag == s3object.e_tag.strip('"')).one()
    self.assertEqual(s3_key, record.s3_key)
    self.assertEqual(filename, record.name)
    self.assertEqual(s3object.e_tag.strip('"'), record.s3_etag)
    self.assertEqual(len(file_content), record.size)
    self.assertEqual(self.upload_area.db_id, record.upload_area_id)
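# Hedged, self-contained sketch (assumes the moto library; in moto>=5 the decorator
# is mock_aws) of the S3 behavior test_create relies on: an object written with a
# ContentType reads back with that content_type, and boto3 reports its ETag wrapped
# in literal double quotes, which is why the test strips '"' before comparing it to
# the DB record's s3_etag. The bucket and key names here are illustrative.
import boto3
from moto import mock_s3

@mock_s3
def demo_etag_quoting():
    s3 = boto3.resource('s3', region_name='us-east-1')
    bucket = s3.create_bucket(Bucket='demo-upload-bucket')
    bucket.put_object(Key='area-id/file-1',
                      Body=b'file1_content',
                      ContentType='application/octet-stream; dcp-type=data')
    s3object = bucket.Object('area-id/file-1')
    assert s3object.content_type == 'application/octet-stream; dcp-type=data'
    assert s3object.get()['Body'].read() == b'file1_content'
    # The raw ETag carries surrounding quote characters; strip them for comparisons.
    assert s3object.e_tag.startswith('"') and s3object.e_tag.endswith('"')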