# NOTE: the methods below are excerpted from a larger test module and assume
# its usual imports, roughly:
#
#   import json
#   from datetime import datetime
#
#   import mock
#
#   from ggrc import db
#   from ggrc.models import all_models
#   from integration.ggrc.models import factories
#
# (`import_export` in test_import_export_job refers to the module exposing
# get_import_export_tasks.)


def test_imports_get_by_id(self):
  """Test imports get item by id"""
  user = all_models.Person.query.first()
  import_job = factories.ImportExportFactory(
      job_type="Import",
      status="Finished",
      created_by=user,
      created_at=datetime.now(),
  )
  response = self.api.client.get(
      "/api/people/{}/imports/{}".format(user.id, import_job.id),
      headers=self.headers,
  )
  result = json.loads(response.data)
  observed_columns = set(result.keys())
  # The endpoint should return every ImportExport column except the
  # "content" and "gdrive_metadata" payload fields.
  expected_columns = set(
      column.name
      for column in all_models.ImportExport.__table__.columns
      if column.name not in ("content", "gdrive_metadata")
  )
  self.assertEqual(observed_columns, expected_columns)
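# The column check above relies on SQLAlchemy table metadata:
# Model.__table__.columns yields Column objects whose .name is the DB column
# name. A minimal standalone sketch of the same idiom (the Job model here is
# illustrative, not a GGRC model):
#
#   from sqlalchemy import Column, Integer, String, Text
#   from sqlalchemy.ext.declarative import declarative_base
#
#   Base = declarative_base()
#
#   class Job(Base):
#     __tablename__ = "jobs"
#     id = Column(Integer, primary_key=True)
#     title = Column(String(250))
#     content = Column(Text)  # payload field, excluded from listings
#
#   light = {c.name for c in Job.__table__.columns if c.name != "content"}
#   assert light == {"id", "title"}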
def test_import_stop(self):
  """Test import stop"""
  user = all_models.Person.query.first()
  ie1 = factories.ImportExportFactory(
      job_type="Import",
      status="Analysis",
      created_at=datetime.now(),
      created_by=user,
      title="test.csv",
      content="test content",
  )
  response = self.client.put(
      "/api/people/{}/imports/{}/stop".format(user.id, ie1.id),
      headers=self.headers,
  )
  self.assert200(response)
  self.assertEqual(json.loads(response.data)["status"], "Stopped")
def test_download_unicode_filename(self, filename):
  """Test import history download with a unicode filename"""
  user = all_models.Person.query.first()
  import_export = factories.ImportExportFactory(
      job_type="Import",
      status="Finished",
      created_at=datetime.now(),
      created_by=user,
      title=filename,
      content="Test content",
  )
  response = self.client.get(
      "/api/people/{}/imports/{}/download?export_to=csv".format(
          user.id, import_export.id),
      headers=self.headers,
  )
  self.assert200(response)
  self.assertEqual(response.data, "Test content")
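# `test_download_unicode_filename` receives `filename` as an argument, so in
# the full test class it is presumably parameterized, e.g. with the `ddt`
# package. A sketch (the decorator values below are assumptions, not the
# original test data):
#
#   import ddt
#
#   @ddt.ddt
#   class TestImportExport(TestCase):
#
#     @ddt.data(u"import_file.csv", u"файл_импорта.csv")
#     def test_download_unicode_filename(self, filename):
#       ...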
def test_export_stop(self):
  """Test export stop"""
  user = all_models.Person.query.first()
  bg_task_name = "test export"
  instance_name = "test instance"
  export_op_type = all_models.BackgroundOperationType.query.filter_by(
      name="export"
  ).first()
  with factories.single_commit():
    ie_job = factories.ImportExportFactory(
        job_type="Export",
        status="In Progress",
        created_at=datetime.now(),
        created_by=user,
        title="test.csv",
        content="test content",
    )
    bg_task = factories.BackgroundTaskFactory(name=bg_task_name)
    factories.BackgroundOperationFactory(
        object_type=ie_job.type,
        object_id=ie_job.id,
        bg_task_id=bg_task.id,
        bg_operation_type=export_op_type,
    )
  with mock.patch("ggrc.settings.APPENGINE_INSTANCE", new=instance_name):
    with mock.patch("ggrc.cloud_api.task_queue.delete_task") as delete_task:
      response = self.client.put(
          "/api/people/{}/exports/{}/stop".format(user.id, ie_job.id),
          headers=self.headers,
      )
      self.assert200(response)
      self.assertEqual(json.loads(response.data)["status"], "Stopped")
      task_name = "projects/{}/locations/{}/queues/{}/tasks/{}".format(
          instance_name, "us-central1", "ggrcImport", bg_task_name
      )
      delete_task.assert_called_once_with(task_name)
      bg_task = all_models.BackgroundTask.query.filter_by(
          name=bg_task_name
      ).first()
      self.assertEqual(
          bg_task.status, all_models.BackgroundTask.STOPPED_STATUS
      )
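# The expected `task_name` follows Cloud Tasks' fully qualified task naming
# scheme, projects/<project>/locations/<location>/queues/<queue>/tasks/<task>.
# A tiny sketch of the same construction (the helper name is illustrative):
#
#   def cloud_task_name(project, location, queue, task):
#     return "projects/{}/locations/{}/queues/{}/tasks/{}".format(
#         project, location, queue, task)
#
#   assert cloud_task_name(
#       "test instance", "us-central1", "ggrcImport", "test export"
#   ) == ("projects/test instance/locations/us-central1"
#         "/queues/ggrcImport/tasks/test export")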
def test_download(self, job_type):
  """Test imports/exports download"""
  user = all_models.Person.query.first()
  ie1 = factories.ImportExportFactory(
      job_type=job_type,
      status="Finished",
      created_at=datetime.now(),
      created_by=user,
      title="test.csv",
      content="test content",
  )
  response = self.client.get(
      "/api/people/{}/{}s/{}/download?export_to=csv".format(
          user.id, job_type.lower(), ie1.id),
      headers=self.headers,
  )
  self.assert200(response)
  self.assertEqual(response.data, "test content")
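# As with test_download_unicode_filename above, `job_type` here would be
# supplied by a parameterizing decorator, e.g. (assumed values):
#
#   @ddt.data("Import", "Export")
#   def test_download(self, job_type):
#     ...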
def run_full_import(self, user, data):
  """Emulate a full cycle of data importing.

  Args:
    user: User object under which the import should be run.
    data: Data that should be imported.
  """
  imp_exp = factories.ImportExportFactory(
      job_type="Import",
      status="Blocked",
      created_by=user,
      created_at=datetime.now(),
      content=data,
      title="test",
  )
  return self.client.put(
      "/api/people/{}/imports/{}/start".format(user.id, imp_exp.id),
      headers=self.headers,
  )
def test_import_export_job(self):
  """Check that proper ImportExport jobs and bg tasks are returned."""
  self.init_taskqueue()
  factories.AuditFactory(slug="audit-1")
  data = (
      "Object type,,,\n"
      "Assessment,Code*,Audit*,Title*,State*,Assignees*,Creators*\n"
      ",,audit-1,Assessment title 1,,[email protected],[email protected]"
  )
  user = all_models.Person.query.first()
  imp_exp = factories.ImportExportFactory(
      job_type="Import",
      status="Not Started",
      created_by=user,
      created_at=datetime.now(),
      content=data,
  )
  with mock.patch("ggrc.views.converters.check_for_previous_run"):
    response = self.client.put(
        "/api/people/{}/imports/{}/start".format(user.id, imp_exp.id),
        headers=self.headers,
    )
    self.assert200(response)
  with mock.patch("ggrc.models.background_task.BackgroundTask.finish"):
    response = self.client.put(
        "/api/people/{}/imports/{}/start".format(user.id, imp_exp.id),
        headers=self.headers,
    )
    self.assert200(response)
  imp_exp.status = "In Progress"
  db.session.add(imp_exp)
  db.session.commit()
  tasks = import_export.get_import_export_tasks()
  self.assertEqual(tasks.count(), 1)
  _, bg_task = tasks.first()
  expected_bg = all_models.BackgroundTask.query.order_by(
      all_models.BackgroundTask.id.desc()).first()
  self.assertEqual(expected_bg.name, bg_task.name)
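# The `data` payload above follows GGRC's CSV import layout: an "Object type"
# marker row, a header row naming the object and its columns (a trailing "*"
# marks a mandatory column), then one data row per object. A second, minimal
# block for another object type might look like (illustrative values):
#
#   data = (
#       "Object type,,\n"
#       "Program,Code*,Title*\n"
#       ",,Imported program"
#   )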
def run_full_import(self, user, data):
  """Emulate a full cycle of data importing.

  Unlike the variant above, this one patches
  ggrc.views.converters.check_for_previous_run, bypassing the previous-run
  check before the import is started.

  Args:
    user: User object under which the import should be run.
    data: Data that should be imported.
  """
  imp_exp = factories.ImportExportFactory(
      job_type="Import",
      status="Blocked",
      created_by=user,
      created_at=datetime.now(),
      content=data,
  )
  with mock.patch("ggrc.views.converters.check_for_previous_run"):
    return self.client.put(
        "/api/people/{}/imports/{}/start".format(user.id, imp_exp.id),
        headers=self.headers,
    )
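# A hypothetical caller of the helper above (the test name and CSV values are
# illustrative only, not part of the original suite):
#
#   def test_blocked_import_start(self):
#     user = all_models.Person.query.first()
#     data = (
#         "Object type,,\n"
#         "Regulation,Code*,Title*\n"
#         ",,Some regulation"
#     )
#     response = self.run_full_import(user, data)
#     self.assert200(response)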