def test_delete_image_by_image_id(db: SQLAlchemy, graphene_client: Client):
    image = Image(name="myImage")
    db.session.add(image)
    db.session.commit()
    assert_that(Image.query.get(1)).is_not_none()

    result = graphene_client.execute('''
        mutation {
            deleteImage(imageId: 1) {
                image {
                    imageId,
                    name
                }
            }
        }
    ''')

    assert_that(json.dumps(result)).is_equal_to_ignoring_whitespace('''
        {
            "data": {
                "deleteImage": {
                    "image": {
                        "imageId": 1,
                        "name": "myImage"
                    }
                }
            }
        }''')
    assert_that(Image.query.get(1)).is_none()

def test_create_image(graphene_client: Client):
    result = graphene_client.execute('''
        mutation {
            createImage(name: "myImage") {
                image {
                    imageId
                    name
                    public
                    keywords
                }
            }
        }
    ''')

    assert_that(json.dumps(result)).is_equal_to_ignoring_whitespace('''
        {
            "data": {
                "createImage": {
                    "image": {
                        "imageId": 1,
                        "name": "myImage",
                        "public": false,
                        "keywords": []
                    }
                }
            }
        }
    ''')

    image = Image.query.get(1)
    assert_that(image.id).is_equal_to(1)
    assert_that(image.name).is_equal_to('myImage')
    assert_that(image.public).is_false()
    assert_that(image.keywords).is_empty()

def test_create_image_with_keywords(graphene_client: Client):
    result = graphene_client.execute('''
        mutation {
            createImage(name: "myImage", keywords: ["foo", "bar", "baz"]) {
                image {
                    keywords
                }
            }
        }
    ''')

    assert_that(json.dumps(result)).is_equal_to_ignoring_whitespace('''
        {
            "data": {
                "createImage": {
                    "image": {
                        "keywords": [
                            "foo",
                            "bar",
                            "baz"
                        ]
                    }
                }
            }
        }
    ''')

    image = Image.query.get(1)
    assert_that([keyword.keyword for keyword in image.keywords]).contains_sequence("foo", "bar", "baz")

def test_delete_gallery_by_gallery_id(db: SQLAlchemy, graphene_client: Client):
    gallery = Gallery(name="myGallery")
    db.session.add(gallery)
    db.session.commit()
    assert_that(Gallery.query.get(1)).is_not_none()

    result = graphene_client.execute('''
        mutation {
            deleteGallery(galleryId: 1) {
                gallery {
                    galleryId,
                    name
                }
            }
        }
    ''')

    assert_that(json.dumps(result)).is_equal_to_ignoring_whitespace('''
        {
            "data": {
                "deleteGallery": {
                    "gallery": {
                        "galleryId": 1,
                        "name": "myGallery"
                    }
                }
            }
        }''')
    assert_that(Gallery.query.get(1)).is_none()

def test_create_post(self):
    client = Client(self.schema)
    executed = client.execute(
        '''
        mutation {
            createPost(
                data: {
                    workoutDate: "2017-01-10T21:33:15.233Z",
                    remark: "latest"
                },
                performances: [{
                    event: 17,
                    value: 20,
                    set1: 10
                }, {
                    event: 48,
                    value: 10,
                    set1: 20
                }]
            ) {
                post {
                    id
                    workoutDate
                    remark
                    performances {
                        event {
                            name
                        }
                        value
                        set1
                    }
                }
            }
        }
        ''',
        context_value=MockContext(self.user),
    )
    post = executed.get("data").get('createPost').get("post")
    self.assertIsNotNone(post)

def test_create_gallery(graphene_client: Client):
    result = graphene_client.execute('''
        mutation {
            createGallery(name: "galleryName", public: true, position: 78) {
                gallery {
                    galleryId
                    name
                    public
                    position
                }
            }
        }
    ''')

    assert_that(json.dumps(result)).is_equal_to_ignoring_whitespace('''
        {
            "data": {
                "createGallery": {
                    "gallery": {
                        "galleryId": 1,
                        "name": "galleryName",
                        "public": true,
                        "position": 78
                    }
                }
            }
        }''')

    gallery = Gallery.query.get(1)
    assert_that(gallery.id).is_equal_to(1)
    assert_that(gallery.name).is_equal_to('galleryName')
    assert_that(gallery.public).is_true()
    assert_that(gallery.position).is_equal_to(78)

def test_query_gallery_no_id(graphene_client: Client):
    result = graphene_client.execute('''
        query {
            gallery {
                galleryId
            }
        }
    ''')
    assert_that(result['errors'][0]['message']).is_equal_to('Either id or gallery_id must be specified')

def test_query_gallery_by_id_wrong_type(graphene_client: Client):
    result = graphene_client.execute(f'''
        query {{
            gallery(id: "{to_global_id("Image", 1)}") {{
                galleryId
            }}
        }}
    ''')
    assert_that(result['errors'][0]['message']).is_equal_to("Wrong id type (expected 'Gallery', got 'Image'")

def test_query_events(self):
    self.create()
    client = Client(self.schema)
    executed = client.execute(
        '''
        query {
            events(name: "test") {
                name
            }
        }
        ''',
        context_value=MockContext(self.user),
    )
    self.assertTrue(len(executed.get('data')) > 0)

def test_query_event(self):
    self.create()
    client = Client(self.schema)
    executed = client.execute(
        '''
        query {
            event(id: %s) {
                name
            }
        }
        ''' % self.event.id,
        context_value=MockContext(self.user),
    )
    event = executed.get('data').get('event')
    self.assertIsNotNone(event)

def test_create_event(self):
    client = Client(self.schema)
    executed = client.execute(
        '''
        mutation {
            createEvent(data: {name: "test", unit: "KG", value: 10, remark: "test-remark"}) {
                event {
                    id
                }
            }
        }
        ''',
        context_value=MockContext(self.user),
    )
    event = executed.get("data").get('createEvent').get("event")
    self.assertIsNotNone(event)

def test_directory(log_dir):
    from ml_dash.config import Args
    Args.logdir = log_dir
    client = Client(schema)
    query = """
        query AppQuery ($id: ID!) {
            directory (id: $id) {
                id name path
                readme { id name path text(stop:11) }
                dashConfigs(first:10) {
                    edges { node { id name path yaml text(stop:11) } }
                }
                charts(first:10) {
                    edges { node { id name dir path yaml text(stop:11) } }
                }
                directories (first:10) {
                    edges {
                        node {
                            id name path
                            directories (first:10) {
                                edges { node { id name } }
                            }
                        }
                    }
                }
                experiments (first:10) {
                    edges {
                        node {
                            id name path
                            parameters {keys flat}
                            files (first:10) {
                                edges { node { id, name} }
                            }
                        }
                    }
                }
            }
        }
    """
    path = "/episodeyang/cpc-belief/mdp"
    r = client.execute(query, variables=dict(id=to_global_id("Directory", path)))
    if 'errors' in r:
        raise RuntimeError(r['errors'])
    else:
        print(">>")
        show(r['data'])

class GeolocationsTestCase(TestCase):

    def setUp(self):
        alb = factory.CountryFactory(name='Albania', iso2='al', iso3='alb')
        factory.GeolocationFactory(
            tag='albania', iso2='al', iso3='alb', object_id=alb.id,
            content_type_id=16, type='country')
        asia = factory.RegionFactory(name='asia')
        factory.GeolocationFactory(
            tag='asia', iso2='', iso3='', object_id=asia.id,
            content_type_id=17, type='region')
        self.client = Client(schema)

    def test_allGeolocation(self):
        geolocation = Geolocation.objects.first()
        query = """
            {allGeolocations {edges {node { id entryId tag } } } }
        """
        result = self.client.execute(query)
        self.assertEqual(
            result['data']['allGeolocations']['edges'][0]['node']['tag'],
            geolocation.tag)

    def test_allGeolocation_region_polygons(self):
        # Get region record
        region = Region.objects.first()
        polygons = region.polygons
        polygons_in_json = polygons.geojson

        # GraphQL query
        query = """
            {allGeolocations(tag:"asia") {
                edges {
                    node {
                        id
                        tag
                        region { name polygons }
                    }
                }
            } }
        """
        result = schema.execute(query)

        # Check if the polygons of the current record have the same
        # coordinates as the GraphQL query result
        result_polygons = json.loads(
            result.data['allGeolocations']['edges'][0]['node']['region']['polygons']
        )
        # The polygons field is a JSONString, so it has to be decoded again
        # into a Python dictionary
        result_polygons_in_dict = json.loads(result_polygons)
        polygons_in_dict = json.loads(polygons_in_json)

        self.assertEqual(polygons_in_dict['coordinates'],
                         result_polygons_in_dict['coordinates'])

    def test_region_centerlonglat(self):
        # Get region record
        region = Region.objects.first()
        center_longlat = region.center_longlat
        center_longlat_in_json = center_longlat.geojson

        # GraphQL query
        query = """
            {allGeolocations(tag:"asia") {edges {node {
                entryId
                tag
                region { centerLonglat }
            } } } }
        """
        result = schema.execute(query)

        # Check if the center_longlat of the current record has the same
        # coordinates as the GraphQL query result
        result_center_longlat = json.loads(
            result.data['allGeolocations']['edges'][0]['node']['region']['centerLonglat']
        )
        # The centerLonglat field is a JSONString, so it has to be decoded
        # again into a Python dictionary
        result_center_longlat_in_dict = json.loads(result_center_longlat)
        center_longlat_in_dict = json.loads(center_longlat_in_json)

        self.assertEqual(center_longlat_in_dict['coordinates'],
                         result_center_longlat_in_dict['coordinates'])

class TestEmployeeSchema(TestCase):
    # ********** Setup Query and Mutation ************ #
    def setUp(self):
        self.client = Client(schema)
        self.employee = mixer.blend(Employee)

    # ***** Single Employee Query ***** #
    # ***** Employee id ***** #
    def test_single_employee_query(self):
        response = self.client.execute(single_employee_query,
                                       variables={"id": self.employee.id})
        response_employee = response.get("data").get("employee")
        # print(response_employee, 'ewrwcs')
        assert response_employee["id"] == str(self.employee.id)

    # ***** Employee list Query ***** #
    # ***** Employee List ***** #
    def test_employee_list_query(self):
        mixer.blend(Employee)
        mixer.blend(Employee)
        response = self.client.execute(employee_list_query)
        # print(response, 'dataeeer')
        allEmployees = response.get("data").get("allEmployees")
        ok = response.get("data").get("ok")
        # print(len(allEmployees), 'length')
        assert len(allEmployees)

    # *********** Employee mutation Creation *********** #
    # ***** Create Employee ***** #
    def test_create_employee(self):
        employee = mixer.blend(Employee)
        response = self.client.execute(create_employee_mutation, variables={
            "firstName": "ramesh",
            "lastName": "kannan",
            "username": "******",
            "email": "*****@*****.**",
            "phoneNo": "9147894561",
            "status": True,
            "description": "Wervfddgdgfgfg"
        })
        employee = response.get("data").get("createEmployee").get("employee")
        assert len(employee)
        username = employee.get("username")
        email = employee.get("email")
        assert username == employee['username']
        assert email == employee['email']

    # ************** Update mutation Testing ************** #
    def test_update_employee(self):
        employee = mixer.blend(Employee)
        response = self.client.execute(update_employee_mutation, variables={
            "id": self.employee.id,
            "username": "******",
            "description": "solution"
        })
        response_employee = response.get("data").get("updateEmployee").get("employee")
        username = response_employee.get("username")
        assert username == response_employee["username"]
        assert username != self.employee.username

    # ******** Delete Mutation Testing ******** #
    def test_delete_employee(self):
        response = self.client.execute(delete_employee_mutation,
                                       variables={"id": self.employee.id})
        ok = response.get("data").get("deleteEmployee").get("employee")
        assert ok

class TestTuttigraphQLSchema(TestCase):
    def setUp(self):
        self.client = Client(schema)
        self.ad = mixer.blend(Ad)
        self.aduser = mixer.blend(AdUser)

    # check if any data for the ad table is returned
    # a simple test just to see if data is returned; the content of the data is not checked
    def test_ads_query(self):
        response = self.client.execute(ad_all_query)
        res = response.get("data").get("ads")
        assert len(res)

    # check if any data for the aduser table is returned
    # a simple test just to see if data is returned; the content of the data is not checked
    def test_adusers_query(self):
        response = self.client.execute(ad_all_user_query)
        res = response.get("data").get("adusers")
        assert len(res)

    # create an ad over the API and check if all attributes are returned correctly
    def test_createad_query(self):
        user = mixer.blend(AdUser)
        payload = {
            "nr": "1234556791",
            "title": "test title",
            "price": "554",
            "zipcode": "3012",
            "description": "this is a description test",
            "category": "food",
            "dateadded": "12.12.2020",
            "views": "44",
            "url": "https://thisisaurl.com",
            "user": user.name
        }
        response = self.client.execute(ad_create_query, variable_values=payload)
        res = response.get('data').get('createAd')
        assert res['title'] == payload['title']
        assert res['description'] == payload['description']
        assert res['url'] == payload['url']
        assert res.get('user')['id'] == str(user.id)
        # assert all values if possible....

    # create a user over the API and check if all attributes are returned correctly
    def test_createaduser_query(self):
        payload = {
            "name": "a_new_user"
        }
        response = self.client.execute(aduser_create_query, variable_values=payload)
        res = response.get('data').get('createAduser')
        assert res['name'] == payload['name']

    # check if searching by title returns the ad
    def test_search_query(self):
        response = self.client.execute(ad_search_query,
                                       variable_values={"search": self.ad.title})
        res = response.get('data').get('ads')
        assert res[0]['title'] == str(self.ad.title)

    # check that only existing users can be added to an ad
    def test_false_user_query(self):
        payload = {
            "nr": "1234556791",
            "title": "test title",
            "price": "554",
            "zipcode": "3012",
            "description": "this is a description test",
            "category": "food",
            "dateadded": "12.12.2020",
            "views": "44",
            "url": "https://thisisaurl.com",
            "user": "******"
        }
        response = self.client.execute(ad_create_query, variable_values=payload)
        res = response.get('errors')[0]
        assert res['message'] == 'Invalid User!'

    # check if deleting an ad works
    def test_deletead_query(self):
        response = self.client.execute(
            ad_delete_query, variable_values={"id": self.ad.id})
        res = response.get('data').get('deleteAd')
        assert res['ok'] == True

    # check if deleting a user works
    def test_deleteaduser_query(self):
        user = mixer.blend(AdUser)
        response = self.client.execute(
            aduser_delete_query, variable_values={"id": user.id})
        res = response.get('data').get('deleteAduser')
        assert res['ok'] == True

    # check if updating an ad works and if the other untouched attributes stay the same
    def test_updatead_query(self):
        newtitle = "some new title"
        response = self.client.execute(ad_update_query, variable_values={
            "id": self.ad.id, "newtitle": newtitle})
        res = response.get('data').get('updateAd')
        assert res.get('title') == newtitle
        assert res.get('description') == self.ad.description
        assert res.get('url') == self.ad.url

    # check if updating a user works
    def test_updateaduser_query(self):
        newname = "new username"
        response = self.client.execute(aduser_update_query, variable_values={
            "id": self.aduser.id, "newname": newname})
        res = response.get('data').get('updateAduser')
        assert res.get('name') == newname

def test_index_query():
    client = Client(schema)
    executed = client.execute("""query root { root }""")
    assert executed == {"data": {"root": "this is index page"}}

class CompanyApiTests(MongoTestCase):
    """Tests the APIs for the People entity"""

    def setUp(self):
        super().setUp()
        self.client = Client(schema)

        # Populate the test collection with person data
        with open("core/tests/test_data/people.json") as people_json:
            peoples = json.load(people_json)
            for people in peoples:
                people_obj = Person(**people)
                people_obj.save()

        # Populate the food collection from the test data file
        with open("core/tests/test_data/food.json") as food_json:
            food_list = json.load(food_json)
            for food in food_list:
                food_obj = Food(**food)
                food_obj.save()

    def test_common_friends_invalid_input(self):
        """Tests if the API returns an error when the number of ids sent
        is less than 2 for the lookup"""
        response = self.client.execute('''
            {
                commonFriends(id: [1]) {
                    index,
                    name,
                    age
                }
            }''')
        self.assertEqual(response["errors"][0]["message"],
                         "Lookup needs atleast two index values")

    def test_common_friends_no_common_friends(self):
        """Tests if the API returns an error if it is unable to find any
        common friends"""
        response = self.client.execute('''
            {
                commonFriends(id: [0,1,2]) {
                    index,
                    name,
                    age
                }
            }''')
        self.assertEqual(response["errors"][0]["message"],
                         "No common friends found")

    def test_common_friends_for_two_ids(self):
        """Tests if the API returns the right common friends when it is
        sent 2 lookup ids"""
        valid_common_friends = [0]
        response = self.client.execute('''
            {
                commonFriends(id: [1,2]) {
                    index
                }
            }''')
        response_common_friends = [friend['index']
                                   for friend in response['data']['commonFriends']]
        self.assertTrue(valid_common_friends == response_common_friends)

    def test_common_friends_for_more_than_two_ids(self):
        """Tests if the API returns the right common friends when it is
        sent more than 2 lookup ids"""
        valid_common_friends = [0, 2]
        response = self.client.execute('''
            {
                commonFriends(id: [1,3,4,5]) {
                    index
                }
            }''')
        response_common_friends = [friend['index']
                                   for friend in response['data']['commonFriends']]
        self.assertTrue(valid_common_friends == response_common_friends)

    def test_favourite_food_invalid_id(self):
        """Tests if the API returns an error for an invalid person index value"""
        response = self.client.execute('''
            {
                favouriteFood(id: 10001){
                    username,
                    age,
                    fruits,
                    vegetables
                }
            }''')
        self.assertEqual(response["errors"][0]["message"],
                         "Person index not found")

    def test_favourite_food_valid(self):
        """Tests if the API returns a valid response for a valid person index"""
        valid_veggies = ["celery", "carrot"]
        valid_fruits = ["apple", "orange"]
        name = "Rosemary Hayes"
        age = 30
        response = self.client.execute('''
            {
                favouriteFood(id: 3){
                    username,
                    age,
                    fruits,
                    vegetables
                }
            }''')
        favouriteFood = response['data']['favouriteFood']
        self.assertTrue(
            favouriteFood['vegetables'] == valid_veggies and
            favouriteFood['fruits'] == valid_fruits and
            favouriteFood['username'] == name and
            favouriteFood['age'] == age
        )

def test_query_get_people_counter(self):
    client = Client(schema)
    executed = client.execute(query_get_people_counter,
                              variables={'room': 'room_a'})
    self.assertMatchSnapshot(executed)

class GraphQLTestCase(TestCase):
    def setUp(self):
        self.teacher = TeacherFactory(username='******', password='******')
        self.course = CourseFactory(
            name='Курс 1',
            started='2017-05-01T15:12:04+03:00',
            teacher=self.teacher,
        )
        self.client = GrapheneClient(schema)

    def test_get_course(self):
        executed = self.client.execute('''query{
            courses{
                name
                teacher{
                    username
                }
                students{
                    username
                }
            }
        }''')
        self.assertEqual(
            executed,
            {
                "data": {
                    "courses": [{
                        "name": "Курс 1",
                        "teacher": {
                            "username": "******"
                        },
                        "students": []
                    }]
                }
            })

    def test_create_course(self):
        executed = self.client.execute('''mutation{
            createCourse(courseData: {
                name: "Курс 2",
                started: "2019-05-01T15:12:04+03:00",
                teacher: "teacher"
            }) {
                course {
                    name
                    started
                    teacher {
                        username
                    }
                }
            }
        }
        ''')
        self.assertEqual(
            executed,
            {
                "data": {
                    "createCourse": {
                        "course": {
                            "name": "Курс 2",
                            "started": "2019-05-01T15:12:04+03:00",
                            "teacher": {
                                "username": "******"
                            }
                        }
                    }
                }
            })

    def test_update_course(self):
        executed = self.client.execute("""mutation {
            updateCourse(courseData:{
                """ + f'id: {self.course.id}' + """
                name: "Новое название курса",
                started: "2019-05-01T15:12:04+03:00",
                teacher: "teacher"
            }) {
                course{
                    name
                    started
                    teacher {
                        username
                    }
                }
            }
        }""")
        self.assertEqual(
            executed,
            {
                "data": {
                    "updateCourse": {
                        "course": {
                            "name": "Новое название курса",
                            "started": "2019-05-01T15:12:04+03:00",
                            "teacher": {
                                "username": "******"
                            }
                        }
                    }
                }
            })

class QueryRooms(BaseTestCase):
    def test_db_rooms_query(self):
        self.base_url = 'https://127.0.0.1:5000/_healthcheck'
        self.client = Client(healthcheck_schema)
        query_rooms = self.client.execute(db_rooms_query)
        self.assertEqual(query_rooms, db_rooms_query_response)

def test_schema(app, snapshot, mutation, query):
    client = Client(schema)
    with app.app_context():
        snapshot.assert_match(client.execute(mutation))
    with app.app_context():
        snapshot.assert_match(client.execute(query))

def test_add_file(self, mock_create_dataset):
    """Test adding a new file to a dataset"""
    class DummyContext(object):
        def __init__(self, file_handle):
            self.dataset_loader = None
            self.labbook_loader = None
            self.files = {'uploadChunk': file_handle}

    def dispatcher_mock(self, function_ref, kwargs, metadata):
        assert kwargs['logged_in_username'] == 'default'
        assert kwargs['logged_in_email'] == '*****@*****.**'
        assert kwargs['dataset_owner'] == 'default'
        assert kwargs['dataset_name'] == 'dataset1'

        # Inject mocked config file
        kwargs['config_file'] = mock_create_dataset[0]

        # Stop patching so job gets scheduled for real
        dispatcher_patch.stop()

        # Call same method as in mutation
        d = Dispatcher()
        kwargs['dispatcher'] = Dispatcher
        res = d.dispatch_task(
            gtmcore.dispatcher.dataset_jobs.complete_dataset_upload_transaction,
            kwargs=kwargs, metadata=metadata)

        return res

    client = Client(mock_create_dataset[3], middleware=[DataloaderMiddleware()])

    # Create file to upload
    test_file = os.path.join(tempfile.gettempdir(), "myValidFile.dat")
    est_size = 9000000
    try:
        os.remove(test_file)
    except:
        pass
    with open(test_file, 'wb') as tf:
        tf.write(os.urandom(est_size))

    new_file_size = os.path.getsize(tf.name)

    # Get upload params
    chunk_size = 4194000
    file_info = os.stat(test_file)
    file_size = int(file_info.st_size / 1000)
    total_chunks = int(math.ceil(file_info.st_size / chunk_size))

    ds = InventoryManager(mock_create_dataset[0]).load_dataset(
        'default', 'default', 'dataset1')
    fsc = HostFilesystemCache(ds, 'default')
    target_file = os.path.join(fsc.current_revision_dir, "myValidFile.dat")

    txid = "000-unitest-transaction"

    with open(test_file, 'rb') as tf:
        # Check for file to exist (shouldn't yet)
        assert os.path.exists(target_file) is False

        for chunk_index in range(total_chunks):
            # Upload a chunk
            chunk = io.BytesIO()
            chunk.write(tf.read(chunk_size))
            chunk.seek(0)
            file = FileStorage(chunk)

            query = f"""
            mutation addDatasetFile{{
              addDatasetFile(input:{{owner: "default",
                                     datasetName: "dataset1",
                                     filePath: "myValidFile.dat",
                                     transactionId: "{txid}",
                chunkUploadParams:{{
                  uploadId: "fdsfdsfdsfdfs",
                  chunkSize: {chunk_size},
                  totalChunks: {total_chunks},
                  chunkIndex: {chunk_index},
                  fileSize: "{file_size}",
                  filename: "{os.path.basename(test_file)}"
                }}
              }}) {{
                newDatasetFileEdge {{
                  node{{
                    id
                    key
                    isDir
                    size
                  }}
                }}
              }}
            }}
            """
            r = client.execute(query, context_value=DummyContext(file))
            assert 'errors' not in r

    # These fields are only populated once the last chunk is uploaded; they are None otherwise.
    assert r['data']['addDatasetFile']['newDatasetFileEdge']['node']['isDir'] is False
    assert r['data']['addDatasetFile']['newDatasetFileEdge']['node']['key'] == 'myValidFile.dat'
    assert r['data']['addDatasetFile']['newDatasetFileEdge']['node']['size'] == f"{new_file_size}"

    # When done uploading, file should exist in the labbook
    assert os.path.exists(target_file)
    assert os.path.isfile(target_file)

    complete_query = f"""
    mutation completeQuery {{
      completeDatasetUploadTransaction(input: {{
        owner: "default",
        datasetName: "dataset1",
        transactionId: "{txid}"
      }}) {{
        backgroundJobKey
      }}
    }}
    """

    # Patch dispatch_task so you can inject the mocked config file
    dispatcher_patch = patch.object(Dispatcher, 'dispatch_task', dispatcher_mock)
    dispatcher_patch.start()

    r = client.execute(complete_query, context_value=DummyContext(None))
    assert 'errors' not in r

    job_query = f"""
    {{
      jobStatus(jobId: "{r['data']['completeDatasetUploadTransaction']['backgroundJobKey']}") {{
        status
        result
        jobMetadata
        failureMessage
        startedAt
        finishedAt
      }}
    }}
    """

    cnt = 0
    while cnt < 20:
        job_result = client.execute(job_query, context_value=DummyContext(None))
        assert 'errors' not in job_result
        if job_result['data']['jobStatus']['status'] == 'finished':
            break
        time.sleep(.25)
        cnt += 1  # bump the retry counter so the loop eventually gives up

    assert cnt < 20

    metadata = json.loads(job_result['data']['jobStatus']['jobMetadata'])
    assert metadata['percent_complete'] == 100
    assert metadata['feedback'] == 'Please wait while file contents are analyzed. 9 MB of 9 MB complete...'

    # Verify file was added and repo is clean
    m = Manifest(ds, 'default')
    status = m.status()
    assert len(status.created) == 0
    assert len(status.modified) == 0
    assert len(status.deleted) == 0

    assert 'Uploaded 1 new file(s)' in ds.git.log()[0]['message']

def step_impl(context):
    context.url = '/graphql'
    client = Client(schema)
    context.response = client.execute(context.text,
                                      context=Context(user=context.user))
    context.response_json = context.response

class TestGetAutomationTaskFiles(unittest.TestCase):

    def setUp(self):
        self.client = Client(root_schema)

    # Note the order of calls must match those made to S3, and copy is used since the object may be mutated
    # (TODO should this be forbidden??)
    @mock.patch('graphql_api.data_s3.BaseS3Data._read_object',
                side_effect=[copy(AUTO_TASK), copy(FILE_REL0), copy(FILE0)])  # copy(FILE1),
    def test_query_with_files(self, mocked_api):
        qry = '''
        query q1 {
          node(id:"UnVwdHVyZUdlbmVyYXRpb25UYXNrOjB6SEo0NTA=") {
            __typename
            ... on AutomationTask {
              id
              files {
                total_count
                edges {
                  node {
                    __typename
                    ... on FileRelation {
                      role
                      file {
                        ... on InversionSolution {
                          id
                        }
                      }
                    }
                  }
                }
              }
            }
          }
        }
        '''
        print(qry)
        executed = self.client.execute(qry)
        print(executed)

        result = executed['data']['node']
        # print("RESULT", result)
        assert result['id'] == 'QXV0b21hdGlvblRhc2s6MHpISjQ1MA=='
        assert result['files']['total_count'] == 1
        assert result['files']['edges'][0]['node']['file'][
            'id'] == 'SW52ZXJzaW9uU29sdXRpb246MC4wbXFjN2Y='
        assert mocked_api.call_count == 3  # this may break if caching or other optimisations are introduced

    @mock.patch('graphql_api.data_s3.BaseS3Data._read_object',
                side_effect=[copy(AUTO_TASK), copy(FILE_REL0), copy(FILE0), copy(AUTO_TASK), None])
    def test_task_product_query(self, mocked_api):
        qry = '''
        query q0 {
          nodes(id_in: ["UnVwdHVyZUdlbmVyYXRpb25UYXNrOjB6SEo0NTA="]) {
            ok
            result {
              edges {
                node {
                  __typename
                  ... on AutomationTask {
                    id
                    created
                    inversion_solution { id file_name }
                    files { total_count }
                  }
                }
              }
            }
          }
        }'''
        print(qry)
        executed = self.client.execute(qry)
        print(executed)

        node = executed['data']['nodes']['result']['edges'][0]['node']
        assert node['id'] == 'QXV0b21hdGlvblRhc2s6MHpISjQ1MA=='
        assert node['files']['total_count'] == 1
        assert node['inversion_solution']['id'] == "SW52ZXJzaW9uU29sdXRpb246MC4wbXFjN2Y="
        assert node['inversion_solution']['file_name'] == "solution.zip"
        assert mocked_api.call_count == 3  # this may break if caching or other optimisations are introduced

    # @skip('as above')
    @mock.patch('graphql_api.data_s3.BaseS3Data._read_object',
                side_effect=[json.loads(ate.automation_task),
                             json.loads(ate.file_rel),
                             json.loads(ate.file)])
    def test_example_failing_product_query(self, mocked_api):
        qry = '''
        query q0 {
          nodes(id_in: ["UnVwdHVyZUdlbmVyYXRpb25UYXNrOjB6SEo0NTA="]) {
            ok
            result {
              edges {
                node {
                  __typename
                  ... on AutomationTask {
                    id
                    created
                    inversion_solution { id file_name }
                    files { total_count }
                  }
                }
              }
            }
          }
        }'''
        print(qry)
        executed = self.client.execute(qry)
        print(executed)

        node = executed['data']['nodes']['result']['edges'][0]['node']
        assert node['id'] == 'QXV0b21hdGlvblRhc2s6ODQ5N0tOTEI='
        assert node['files']['total_count'] == 4
        assert node['inversion_solution']['id'] == "SW52ZXJzaW9uU29sdXRpb246MTczMC4wa3BjS0s="
        assert node['inversion_solution'][
            'file_name'] == "NZSHM22_InversionSolution-QXV0b21hdGlvblRhc2s6ODQ5N0tOTEI=.zip"
        assert mocked_api.call_count == 3  # this may break if caching or other optimisations are introduced

def test_getUpdates(self):
    """Testing getUpdates query."""
    release = base.BaseTestCaseMixin.create_release(self, version='22')
    self.create_update(build_nvrs=['TurboGears-2.1-1.el5'],
                       release_name=release.name)
    up2 = self.create_update(build_nvrs=['freetype-2.10.2-1.fc32'],
                             release_name=release.name)
    up2.alias = "FEDORA-2020-3223f9ec8b"
    up2.stable_days = 1
    up2.date_approved = datetime.datetime(2019, 10, 13, 16, 16, 22, 438484)
    self.db.commit()

    client = Client(schema)

    executed = client.execute("""{
        getUpdates(stableDays: 1, dateApproved: "2019-10-13 16:16:22.438484") {
            alias
            request
            unstableKarma
        }}""")
    assert executed == {
        "data": {
            "getUpdates": [{
                "alias": "FEDORA-2020-3223f9ec8b",
                "request": "testing",
                "unstableKarma": -3
            }]
        }
    }

    executed = client.execute("""{
        getUpdates(stableKarma: 3, status: "pending", critpath: false,
                   pushed: false, request: "testing") {
            stableDays
            userId
        }}""")
    assert executed == {
        'data': {
            'getUpdates': [{
                'stableDays': 0,
                'userId': 1
            }, {
                'stableDays': 0,
                'userId': 1
            }, {
                'stableDays': 1,
                'userId': 1
            }]
        }
    }

    executed = client.execute("""{
        getUpdates(stableDays: 1, unstableKarma: -3, alias: "FEDORA-2020-3223f9ec8b") {
            dateApproved
            request
        }}""")
    assert executed == {
        'data': {
            'getUpdates': [{
                'dateApproved': "2019-10-13 16:16:22.438484",
                'request': 'testing'
            }]
        }
    }

    executed = client.execute("""{
        getUpdates(critpath: false, stableDays: 1, userId: 1) {
            request
            unstableKarma
        }}""")
    assert executed == {
        'data': {
            'getUpdates': [{
                'request': 'testing',
                'unstableKarma': -3,
            }]
        }
    }

    executed = client.execute("""{
        getUpdates(releaseName: "F22") {
            request
        }}""")
    assert executed == {
        'data': {
            'getUpdates': [{
                'request': 'testing',
            }, {
                'request': 'testing',
            }]
        }
    }

class DishSchemaTest(TestCase):
    def setUp(self):
        self.client = Client(schema=schema)

    def tearDown(self):
        Dish.objects.all().delete()

    def test_query_dish(self):
        self.obj = Dish.objects.create(name='Burrata')
        query = '''
            query getDish($id: Int!) {
                dish(id: $id) {
                    name
                }
            }
        '''
        result = self.client.execute(query, variables={'id': self.obj.pk})
        self.assertEqual(result['data']['dish']['name'], 'Burrata')

    def test_query_dishes(self):
        dishes = list((Dish.objects.create(name=f'item_{_}') for _ in range(5)))
        query = '''
            query {
                dishes {
                    name
                }
            }'''
        result = self.client.execute(query)
        self.assertEqual(len(result['data']['dishes']), 5)

    def test_mutation_create_dish(self):
        mutation = '''
            mutation createDish($name: String!) {
                createDish(name: $name) {
                    dish {
                        name
                    }
                    ok
                }
            }'''
        result = self.client.execute(mutation, variables={'name': 'Burrata'})
        self.assertTrue(result['data']['createDish']['ok'])
        self.assertEqual(result['data']['createDish']['dish']['name'], 'Burrata')
        self.assertTrue(Dish.objects.exists())
        self.assertEqual(Dish.objects.all().first().name, 'Burrata')

    def test_mutation_update_dish(self):
        self.obj = Dish.objects.create(name='Burrata')
        self.assertTrue(Dish.objects.exists())
        self.assertEqual(Dish.objects.all().first().name, 'Burrata')
        mutation = '''
            mutation updateDish($id: Int!, $name: String!) {
                updateDish(id: $id, name: $name) {
                    dish {
                        id,
                        name
                    }
                    ok
                }
            }'''
        result = self.client.execute(mutation,
                                     variables={'id': self.obj.id, 'name': 'Burrata 2'})
        self.assertTrue(result['data']['updateDish']['ok'])
        self.assertEqual(result['data']['updateDish']['dish']['id'], self.obj.id)
        self.assertEqual(result['data']['updateDish']['dish']['name'], 'Burrata 2')
        self.assertEqual(Dish.objects.all().first().name, 'Burrata 2')

    def test_mutation_delete_dish(self):
        dishes = list((Dish.objects.create(name=f'item_{_}') for _ in range(3)))
        self.assertTrue(Dish.objects.exists())
        self.assertEqual(Dish.objects.all().count(), 3)
        mutation = '''
            mutation deleteDish($id: Int!) {
                deleteDish(id: $id) {
                    dish {
                        id,
                        name
                    }
                    ok
                }
            }'''
        result = self.client.execute(mutation, variables={'id': dishes[2].id})
        self.assertTrue(result['data']['deleteDish']['ok'])
        self.assertEqual(result['data']['deleteDish']['dish']['name'], 'item_2')
        self.assertEqual(Dish.objects.all().count(), 2)

class VoiceBelongsUserTests(TestCase):
    """Test using mfcc and fastwd for voice recognition and authentication"""

    def setUp(self):
        self.client = Client(schema)
        self.query = '''
            query voiceBelongsResident(
                $cpf: String!,
                $audioSpeakingPhrase: [Float]!,
                $audioSamplerate: Int
            ){
                voiceBelongsResident(
                    cpf: $cpf,
                    audioSpeakingPhrase: $audioSpeakingPhrase,
                    audioSamplerate: $audioSamplerate
                )
            }
        '''
        self.residents = [
            'aline', 'felipe', 'marcos', 'mateus', 'paulo', 'pedro',
            'samuel', 'sergio', 'silva', 'victor', 'vitor'
        ]

    def compute_accuracy(self, file_suffix):
        '''
        Calculate the "speaker identification" hit ratio for true speakers
        (the voice really belongs to the resident)
        '''
        matches = 0.0
        for resident in self.residents:
            samplerate, data = read('accounts/tests/audios/' + resident + file_suffix)
            response = self.client.execute(
                self.query,
                variables={
                    'cpf': resident,
                    'audioSpeakingPhrase': data.tolist(),
                    'audioSamplerate': samplerate
                }
            )
            if response["data"]["voiceBelongsResident"] == True:
                matches = matches + 1.0

        return (matches / len(self.residents)) * 100

    def test_accuracy_for_clean_samples(self):
        '''
        Calculate hit ratio for clean audio samples
        '''
        hit_ratio = self.compute_accuracy('_clean.wav')
        # accuracy must be greater than or equal to 90%
        self.assertGreaterEqual(hit_ratio, 90.0)

    def test_accuracy_for_noised_samples(self):
        '''
        Calculate hit ratio for noised audio samples
        '''
        hit_ratio = self.compute_accuracy('_noised.wav')
        # accuracy must be greater than or equal to 53%
        self.assertGreaterEqual(hit_ratio, 53.0)

    def test_impostors_rejection(self):
        samplerate, data = read('accounts/tests/audios/impostor.wav')
        rejections = 0.0
        for resident in self.residents:
            response = self.client.execute(
                self.query,
                variables={
                    'cpf': resident,
                    'audioSpeakingPhrase': data.tolist(),
                    'audioSamplerate': samplerate
                }
            )
            if response["data"]["voiceBelongsResident"] == False:
                rejections = rejections + 1.0

        # rejection rate must be greater than or equal to 60%
        self.assertGreaterEqual((rejections / len(self.residents)) * 100, 60.0)

    def test_nonexistent_cpf_except(self):
        response = self.client.execute(
            self.query,
            variables={
                'cpf': '1111111111',
                'audioSpeakingPhrase': [2.7 * x for x in range(32000)],
                'audioSpeakingName': [2.7 * x for x in range(32000)],
                'audioSamplerate': 16000
            }
        )
        self.assertIsNotNone(response['errors'])

def test_import_labbook(self, fixture_working_dir):
    """Test batch uploading, but not full import"""
    class DummyContext(object):
        def __init__(self, file_handle):
            self.labbook_loader = None
            self.files = {'uploadChunk': file_handle}

    client = Client(fixture_working_dir[3], middleware=[LabBookLoaderMiddleware()])

    # Create a temporary labbook
    lb = LabBook(fixture_working_dir[0])
    lb.new(owner={"username": "******"}, name="test-export",
           description="Tester")

    # Create a largeish file in the dir
    with open(os.path.join(fixture_working_dir[1], 'testfile.bin'), 'wb') as testfile:
        testfile.write(os.urandom(9000000))
    FileOperations.insert_file(lb, 'input', testfile.name)

    # Export labbook
    zip_file = export_labbook_as_zip(lb.root_dir, tempfile.gettempdir())
    lb_dir = lb.root_dir

    # Get upload params
    chunk_size = 4194304
    file_info = os.stat(zip_file)
    file_size = int(file_info.st_size / 1000)
    total_chunks = int(math.ceil(file_info.st_size / chunk_size))

    with open(zip_file, 'rb') as tf:
        for chunk_index in range(total_chunks):
            chunk = io.BytesIO()
            chunk.write(tf.read(chunk_size))
            chunk.seek(0)
            file = FileStorage(chunk)

            query = f"""
            mutation myMutation{{
              importLabbook(input:{{
                chunkUploadParams:{{
                  uploadId: "jfdjfdjdisdjwdoijwlkfjd",
                  chunkSize: {chunk_size},
                  totalChunks: {total_chunks},
                  chunkIndex: {chunk_index},
                  fileSizeKb: {file_size},
                  filename: "{os.path.basename(zip_file)}"
                }}
              }}) {{
                importJobKey
                buildImageJobKey
              }}
            }}
            """
            result = client.execute(query, context_value=DummyContext(file))
            assert "errors" not in result
            if chunk_index == total_chunks - 1:
                assert type(result['data']['importLabbook']['importJobKey']) == str
                assert type(result['data']['importLabbook']['buildImageJobKey']) == str
                assert "rq:job:" in result['data']['importLabbook']['importJobKey']
                assert "rq:job:" in result['data']['importLabbook']['buildImageJobKey']

            chunk.close()

class TestUserSchema(TestCase):
    def setUp(self):
        self.client = Client(schema, middleware=[passTokenTest])
        self.request = RequestFactory().get('/')
        self.user = GuestUser.objects.signIn(token='')
        self.uid = self.user.id
        self.todoId = Todo.objects.create(text='xx', user=self.user).pk

    def test_create_todo(self):
        self.request.headers = {'authorization': ''}
        self.request.uid = self.uid
        query = """
            mutation ($text:String, $isCompleted:Boolean) {
                createTodo(text:$text, isCompleted:$isCompleted) {
                    id
                    text
                }
            }
        """
        response = self.client.execute(query,
                                       variables={'text': 'I am testing'},
                                       context_value=self.request)
        assert 'errors' not in response
        assert response.get('data').get('createTodo').get('text') == 'I am testing'

    def test_update_todo(self):
        self.request.headers = {'authorization': ''}
        self.request.uid = self.uid
        query = """
            mutation ($id:ID, $text:String, $isCompleted:Boolean) {
                updateTodo(id:$id, text:$text, isCompleted:$isCompleted) {
                    id
                    text
                }
            }
        """
        response = self.client.execute(query,
                                       variables={
                                           'id': self.todoId,
                                           'text': 'I am testing'
                                       },
                                       context_value=self.request)
        assert 'errors' not in response
        assert response.get('data').get('updateTodo').get('text') == 'I am testing'

    def test_query_todo(self):
        self.request.headers = {'authorization': ''}
        self.request.uid = self.uid
        query = """
            query {
                todos {
                    id
                    text
                }
            }
        """
        response = self.client.execute(query, context_value=self.request)
        assert 'errors' not in response
        assert response.get('data').get('todos') is not None

    def test_delete_todo(self):
        self.request.headers = {'authorization': ''}
        self.request.uid = self.uid
        query = """
            mutation ($id:ID) {
                deleteTodo(id:$id) {
                    id
                }
            }
        """
        response = self.client.execute(query,
                                       variables={'id': self.todoId},
                                       context_value=self.request)
        assert 'errors' not in response
        assert response.get('data').get('deleteTodo').get('id') == str(self.todoId)

class TestDjangoFormQuery:
    @pytest.fixture(autouse=True)
    def setup_schema(self):
        class FormQueryForm(forms.Form):
            my_field = forms.CharField(max_length=5)

        class FormQuery(DjangoFormQuery):
            class Meta:
                form_class = FormQueryForm

            response = graphene.String()

            @classmethod
            def perform_query(cls, form, info):
                return cls(
                    response=f"hello there from query {form.cleaned_data['my_field']}")

        class Queries(graphene.ObjectType):
            form_query = FormQuery.Field()

        self.schema = graphene.Schema(query=Queries)
        self.client = Client(self.schema)

    def request(self, input):
        response = jsonify(
            self.client.execute('''
                query MyQuery($input: FormQueryInput!) {
                    formQuery(input: $input) {
                        errors { field, extendedMessages { message, code } },
                        response
                    }
                }
                ''',
                                variables={'input': input},
                                context=create_fake_request()))
        return response['data']['formQuery']

    def test_it_works(self):
        assert self.request({'myField': 'boop'}) == {
            'errors': [],
            'response': 'hello there from query boop'
        }

    def test_it_returns_errors(self):
        assert self.request({'myField': 'boooop'}) == {
            'errors': [{
                'field': 'myField',
                'extendedMessages': [{
                    'code': 'max_length',
                    'message': 'Ensure this value has at most 5 characters (it has 6).'
                }]
            }],
            'response': None
        }

def test_query_all_people_counter(self):
    client = Client(schema)
    executed = client.execute(query_all_people_counter)
    self.assertMatchSnapshot(executed)