def test_create_post(self):
    """The createPost mutation should return the newly created post."""
    gql_client = Client(self.schema)
    response = gql_client.execute(
        '''
        mutation {
            createPost(
                data: { workoutDate: "2017-01-10T21:33:15.233Z", remark: "latest" },
                performances: [{ event: 17, value: 20, set1: 10 },
                               { event: 48, value: 10, set1: 20 }]
            ) {
                post {
                    id
                    workoutDate
                    remark
                    performances { event { name } value set1 }
                }
            }
        }
        ''',
        context_value=MockContext(self.user),
    )
    created = response.get("data").get('createPost').get("post")
    self.assertIsNotNone(created)
def test_query_events(self):
    """Filtering events by name should return at least one entry."""
    self.create()
    gql_client = Client(self.schema)
    response = gql_client.execute(
        '''
        query {
            events(name: "test") { name }
        }
        ''',
        context_value=MockContext(self.user),
    )
    self.assertTrue(len(response.get('data')) > 0)
def test_query_event(self):
    """event(id) should resolve the event created by self.create()."""
    self.create()
    gql_client = Client(self.schema)
    response = gql_client.execute(
        '''
        query {
            event(id: %s) { name }
        }
        ''' % self.event.id,
        context_value=MockContext(self.user),
    )
    self.assertIsNotNone(response.get('data').get('event'))
def test_delete_gallery_by_gallery_id(db: SQLAlchemy, graphene_client: Client):
    """deleteGallery should echo the deleted row and remove it from the DB."""
    db.session.add(Gallery(name="myGallery"))
    db.session.commit()
    assert_that(Gallery.query.get(1)).is_not_none()

    result = graphene_client.execute('''
        mutation {
            deleteGallery(galleryId: 1) {
                gallery { galleryId, name }
            }
        }
    ''')

    assert_that(json.dumps(result)).is_equal_to_ignoring_whitespace('''
        {
            "data": {
                "deleteGallery": {
                    "gallery": { "galleryId": 1, "name": "myGallery" }
                }
            }
        }''')
    # Row must be gone after the mutation.
    assert_that(Gallery.query.get(1)).is_none()
def test_create_event(self):
    """createEvent should return the newly created event's id."""
    gql_client = Client(self.schema)
    response = gql_client.execute(
        '''
        mutation {
            createEvent(data: {name: "test", unit: "KG", value: 10, remark: "test-remark"}) {
                event { id }
            }
        }
        ''',
        context_value=MockContext(self.user),
    )
    self.assertIsNotNone(response.get("data").get('createEvent').get("event"))
def test_delete_image_by_image_id(db: SQLAlchemy, graphene_client: Client):
    """deleteImage should echo the deleted row and remove it from the DB."""
    db.session.add(Image(name="myImage"))
    db.session.commit()
    assert_that(Image.query.get(1)).is_not_none()

    result = graphene_client.execute('''
        mutation {
            deleteImage(imageId: 1) {
                image { imageId, name }
            }
        }
    ''')

    assert_that(json.dumps(result)).is_equal_to_ignoring_whitespace('''
        {
            "data": {
                "deleteImage": {
                    "image": { "imageId": 1, "name": "myImage" }
                }
            }
        }''')
    # Row must be gone after the mutation.
    assert_that(Image.query.get(1)).is_none()
def test_create_image(graphene_client: Client):
    """createImage should return defaults (private, no keywords) and persist the row."""
    result = graphene_client.execute('''
        mutation {
            createImage(name: "myImage") {
                image { imageId name public keywords }
            }
        }
    ''')

    assert_that(json.dumps(result)).is_equal_to_ignoring_whitespace('''
        {
            "data": {
                "createImage": {
                    "image": {
                        "imageId": 1,
                        "name": "myImage",
                        "public": false,
                        "keywords": []
                    }
                }
            }
        }
    ''')

    stored = Image.query.get(1)
    assert_that(stored.id).is_equal_to(1)
    assert_that(stored.name).is_equal_to('myImage')
    assert_that(stored.public).is_false()
    assert_that(stored.keywords).is_empty()
def test_create_image_with_keywords(graphene_client: Client):
    """createImage should store and echo back the supplied keywords in order."""
    result = graphene_client.execute('''
        mutation {
            createImage(name: "myImage", keywords: ["foo", "bar", "baz"]) {
                image { keywords }
            }
        }
    ''')

    assert_that(json.dumps(result)).is_equal_to_ignoring_whitespace('''
        {
            "data": {
                "createImage": {
                    "image": {
                        "keywords": [ "foo", "bar", "baz" ]
                    }
                }
            }
        }
    ''')

    stored = Image.query.get(1)
    stored_keywords = [kw.keyword for kw in stored.keywords]
    assert_that(stored_keywords).contains_sequence("foo", "bar", "baz")
def test_create_gallery(graphene_client: Client):
    """createGallery should echo the new gallery and persist all of its fields."""
    result = graphene_client.execute('''
        mutation {
            createGallery(name: "galleryName", public: true, position: 78) {
                gallery { galleryId name public position }
            }
        }
    ''')

    assert_that(json.dumps(result)).is_equal_to_ignoring_whitespace('''
        {
            "data": {
                "createGallery": {
                    "gallery": {
                        "galleryId": 1,
                        "name": "galleryName",
                        "public": true,
                        "position": 78
                    }
                }
            }
        }''')

    stored = Gallery.query.get(1)
    assert_that(stored.id).is_equal_to(1)
    assert_that(stored.name).is_equal_to('galleryName')
    assert_that(stored.public).is_true()
    assert_that(stored.position).is_equal_to(78)
def test_query_gallery_by_id_wrong_type(graphene_client: Client):
    """Passing a global id of the wrong node type should produce a typed error."""
    wrong_type_id = to_global_id("Image", 1)
    result = graphene_client.execute(f'''
        query {{
            gallery(id: "{wrong_type_id}") {{ galleryId }}
        }}
    ''')
    # The expected message below must match the server's message verbatim
    # (including its unbalanced parenthesis).
    assert_that(result['errors'][0]['message']).is_equal_to(
        "Wrong id type (expected 'Gallery', got 'Image'")
def test_query_gallery_no_id(graphene_client: Client):
    """Omitting both id arguments should produce a descriptive error."""
    result = graphene_client.execute('''
        query {
            gallery { galleryId }
        }
    ''')
    assert_that(result['errors'][0]['message']).is_equal_to(
        'Either id or gallery_id must be specified')
def test_query_post(self):
    """post(id) should resolve the post created by self.create()."""
    self.create()
    gql_client = Client(self.schema)
    response = gql_client.execute(
        '''
        query {
            post(id: %s) {
                workoutDate
                performances { id event { name } }
            }
        }
        ''' % self.post.id,
        context_value=MockContext(self.user),
    )
    self.assertIsNotNone(response.get('data').get('post'))
def test_query_nonexistent_gallery(graphene_client: Client):
    """Querying a missing gallery id should yield null, not an error."""
    result = graphene_client.execute('''
        query {
            gallery(galleryId: 1) { galleryId }
        }
    ''')
    assert_that(json.dumps(result)).is_equal_to_ignoring_whitespace('''
        { "data": { "gallery": null } }
    ''')
def test_delete_nonexistent_gallery(graphene_client: Client):
    """Deleting a missing gallery should yield a null gallery, not an error."""
    result = graphene_client.execute('''
        mutation {
            deleteGallery(galleryId: 1) {
                gallery { galleryId, name }
            }
        }
    ''')
    assert_that(json.dumps(result)).is_equal_to_ignoring_whitespace('''
        {
            "data": {
                "deleteGallery": { "gallery": null }
            }
        }''')
def test_delete_nonexistent_image(graphene_client: Client):
    """Deleting a missing image should yield a null image, not an error."""
    result = graphene_client.execute('''
        mutation {
            deleteImage(imageId: 1) {
                image { imageId, name }
            }
        }
    ''')
    assert_that(json.dumps(result)).is_equal_to_ignoring_whitespace('''
        {
            "data": {
                "deleteImage": { "image": null }
            }
        }''')
def test_query_gallery_by_id(db: SQLAlchemy, graphene_client: Client):
    """gallery(id:) should resolve a gallery via its relay global id."""
    db.session.add(Gallery())
    db.session.commit()

    global_id = to_global_id("Gallery", 1)
    result = graphene_client.execute(f'''
        query {{
            gallery(id: "{global_id}") {{ galleryId }}
        }}
    ''')

    assert_that(json.dumps(result)).is_equal_to_ignoring_whitespace('''
        { "data": { "gallery": { "galleryId": 1 } } }
    ''')
def test_query_gallery(db: SQLAlchemy, graphene_client: Client):
    """gallery(galleryId) should resolve a stored gallery and its fields."""
    db.session.add(Gallery(name="myGallery", public=False))
    db.session.commit()

    result = graphene_client.execute('''
        query {
            gallery(galleryId: 1) { galleryId name public }
        }
    ''')

    assert_that(json.dumps(result)).is_equal_to_ignoring_whitespace('''
        {
            "data": {
                "gallery": {
                    "galleryId": 1,
                    "name": "myGallery",
                    "public": false
                }
            }
        }
    ''')
def setUp(self):
    """Build a fresh schema and a graphene test client for each test."""
    self.client = Client(create_schema())
class CompanyApiTests(MongoTestCase):
    """Tests the APIs for the People entity"""

    def setUp(self):
        """Load the person and food fixtures into the test database."""
        super().setUp()
        self.client = Client(schema)

        # Populate the test collection with person data
        with open("core/tests/test_data/people.json") as people_json:
            peoples = json.load(people_json)
            for people in peoples:
                people_obj = Person(**people)
                people_obj.save()

        # Populate the food collection from test data file
        with open("core/tests/test_data/food.json") as food_json:
            food_list = json.load(food_json)
            for food in food_list:
                food_obj = Food(**food)
                food_obj.save()

    def test_common_friends_invalid_input(self):
        """Tests if the API returns an error when the number of ids sent
        is less than 2 for the lookup"""
        response = self.client.execute('''
            {
                commonFriends(id: [1]) {
                    index, name, age
                }
            }''')
        # NOTE: "atleast" matches the server's message verbatim.
        self.assertEqual(response["errors"][0]["message"],
                         "Lookup needs atleast two index values")

    def test_common_friends_no_common_friends(self):
        """Tests if the API returns an error if it is unable to find any
        common friends"""
        response = self.client.execute('''
            {
                commonFriends(id: [0,1,2]) {
                    index, name, age
                }
            }''')
        self.assertEqual(response["errors"][0]["message"], "No common friends found")

    def test_common_friends_for_two_ids(self):
        """Tests if the API returns the right common friends when it is sent
        2 lookup ids"""
        valid_common_friends = [0]
        response = self.client.execute('''
            {
                commonFriends(id: [1,2]) {
                    index
                }
            }''')
        response_common_friends = [friend['index']
                                   for friend in response['data']['commonFriends']]
        self.assertTrue(valid_common_friends == response_common_friends)

    def test_common_friends_for_more_than_two_ids(self):
        """Tests if the API returns the right common friends when it is sent
        more than 2 lookup ids"""
        valid_common_friends = [0, 2]
        response = self.client.execute('''
            {
                commonFriends(id: [1,3,4,5]) {
                    index
                }
            }''')
        response_common_friends = [friend['index']
                                   for friend in response['data']['commonFriends']]
        self.assertTrue(valid_common_friends == response_common_friends)

    def test_favourite_food_invalid_id(self):
        """Tests if API returns an error for an invalid person index value"""
        response = self.client.execute('''
            {
                favouriteFood(id: 10001){
                    username, age, fruits, vegetables
                }
            }''')
        self.assertEqual(response["errors"][0]["message"], "Person index not found")

    def test_favourite_food_valid(self):
        """Tests if the API returns a valid response for a valid person index"""
        # Expected values come from the people.json / food.json fixtures.
        valid_veggies = ["celery", "carrot"]
        valid_fruits = ["apple", "orange"]
        name = "Rosemary Hayes"
        age = 30
        response = self.client.execute('''
            {
                favouriteFood(id: 3){
                    username, age, fruits, vegetables
                }
            }''')
        favouriteFood = response['data']['favouriteFood']
        self.assertTrue(
            favouriteFood['vegetables'] == valid_veggies and
            favouriteFood['fruits'] == valid_fruits and
            favouriteFood['username'] == name and
            favouriteFood['age'] == age
        )
def client():
    """Yield a graphene test client bound to the server schema."""
    test_client = Client(server.schema)
    yield test_client
def fixture_working_dir_populated_scoped():
    """A pytest fixture that creates a temporary working directory, a config file to match,
    creates the schema, and populates the environment component repository.
    Class scope modifier attached
    """
    # Create temp dir
    config_file, temp_dir = _create_temp_work_dir()

    # Create user identity
    insert_cached_identity(temp_dir)

    # Create test client
    schema = graphene.Schema(query=LabbookQuery, mutation=LabbookMutations)

    # Create a bunch of lab books
    # NOTE(review): the sleeps presumably keep creation timestamps distinct so
    # ordering-dependent tests are deterministic — confirm before removing.
    im = InventoryManager(config_file)
    im.create_labbook('default', 'default', "labbook1", description="Cats labbook 1")
    time.sleep(1.1)
    im.create_labbook('default', 'default', "labbook2", description="Dogs labbook 2")
    time.sleep(1.1)
    im.create_labbook('default', 'default', "labbook3", description="Mice labbook 3")
    time.sleep(1.1)
    im.create_labbook('default', 'default', "labbook4", description="Horses labbook 4")
    time.sleep(1.1)
    im.create_labbook('default', 'default', "labbook5", description="Cheese labbook 5")
    time.sleep(1.1)
    im.create_labbook('default', 'default', "labbook6", description="Goat labbook 6")
    time.sleep(1.1)
    im.create_labbook('default', 'default', "labbook7", description="Turtle labbook 7")
    time.sleep(1.1)
    im.create_labbook('default', 'default', "labbook8", description="Lamb labbook 8")
    time.sleep(1.1)
    im.create_labbook('default', 'default', "labbook9", description="Taco labbook 9")
    time.sleep(1.1)
    # A labbook under a different user, used to verify per-user scoping.
    im.create_labbook('test3', 'test3', "labbook-0", description="This should not show up.")

    with patch.object(Configuration, 'find_default_config', lambda self: config_file):
        # Load User identity into app context
        app = Flask("lmsrvlabbook")
        app.config["LABMGR_CONFIG"] = Configuration()
        app.config["LABMGR_ID_MGR"] = get_identity_manager(Configuration())

        with app.app_context():
            # within this block, current_app points to app. Set current user
            # explicitly (this is done in the middleware)
            flask.g.user_obj = app.config["LABMGR_ID_MGR"].get_user_profile()

            # Create a test client
            client = Client(schema, middleware=[DataloaderMiddleware()],
                            context_value=ContextMock())

            yield config_file, temp_dir, client, schema

    # Remove the temp_dir
    shutil.rmtree(temp_dir)
def test_schema(app, snapshot, mutation, query):
    """Snapshot-test a mutation and then a query, each in its own app context."""
    gql_client = Client(schema)
    for operation in (mutation, query):
        with app.app_context():
            snapshot.assert_match(gql_client.execute(operation))
def setUp(self):
    """Create the schema/client plus two registered users and an anonymous user."""
    self.schema = Schema(query=Query, mutation=Mutation)
    self.client = Client(self.schema)
    self.user1 = User.objects.create(username="******")
    self.user2 = User.objects.create(username="******")
    # Used to verify behaviour for unauthenticated requests.
    self.unauthorised_user = AnonymousUser()
def step_impl(context):
    """Execute the scenario's GraphQL text and stash the result on the behave context."""
    context.url = '/graphql'
    gql_client = Client(schema)
    result = gql_client.execute(context.text, context=Context(user=context.user))
    context.response = result
    context.response_json = result
class DishSchemaTest(TestCase):
    """Exercises the Dish GraphQL queries and mutations end to end."""

    def setUp(self):
        self.client = Client(schema=schema)

    def tearDown(self):
        # Keep the test database clean between tests.
        Dish.objects.all().delete()

    def test_query_dish(self):
        """dish(id) should return the matching dish by primary key."""
        self.obj = Dish.objects.create(name='Burrata')
        query = '''
            query getDish($id: Int!) {
                dish(id: $id) {
                    name
                }
            }
        '''
        result = self.client.execute(query, variables={'id': self.obj.pk})
        self.assertEqual(result['data']['dish']['name'], 'Burrata')

    def test_query_dishes(self):
        """dishes should list every stored dish."""
        for i in range(5):
            Dish.objects.create(name=f'item_{i}')
        # Bug fix: the previous version built this query string but then
        # executed a different inline one; execute the named query instead.
        query = '''
            query {
                dishes {
                    name
                }
            }'''
        result = self.client.execute(query)
        self.assertEqual(len(result['data']['dishes']), 5)

    def test_mutation_create_dish(self):
        """createDish should report ok and persist the new dish."""
        mutation = '''
            mutation createDish($name: String!) {
                createDish(name: $name) {
                    dish {
                        name
                    }
                    ok
                }
            }'''
        result = self.client.execute(mutation, variables={'name': 'Burrata'})
        self.assertTrue(result['data']['createDish']['ok'])
        self.assertEqual(result['data']['createDish']['dish']['name'], 'Burrata')
        self.assertTrue(Dish.objects.exists())
        self.assertEqual(Dish.objects.all().first().name, 'Burrata')

    def test_mutation_update_dish(self):
        """updateDish should rename an existing dish and report ok."""
        self.obj = Dish.objects.create(name='Burrata')
        self.assertTrue(Dish.objects.exists())
        self.assertEqual(Dish.objects.all().first().name, 'Burrata')
        mutation = '''
            mutation updateDish($id: Int!, $name: String!) {
                updateDish(id: $id, name: $name) {
                    dish {
                        id, name
                    }
                    ok
                }
            }'''
        result = self.client.execute(
            mutation, variables={'id': self.obj.id, 'name': 'Burrata 2'})
        self.assertTrue(result['data']['updateDish']['ok'])
        self.assertEqual(result['data']['updateDish']['dish']['id'], self.obj.id)
        self.assertEqual(result['data']['updateDish']['dish']['name'], 'Burrata 2')
        self.assertEqual(Dish.objects.all().first().name, 'Burrata 2')

    def test_mutation_delete_dish(self):
        """deleteDish should remove exactly the targeted dish."""
        dishes = [Dish.objects.create(name=f'item_{i}') for i in range(3)]
        self.assertTrue(Dish.objects.exists())
        self.assertEqual(Dish.objects.all().count(), 3)
        mutation = '''
            mutation deleteDish($id: Int!) {
                deleteDish(id: $id) {
                    dish {
                        id, name
                    }
                    ok
                }
            }'''
        result = self.client.execute(mutation, variables={'id': dishes[2].id})
        self.assertTrue(result['data']['deleteDish']['ok'])
        self.assertEqual(result['data']['deleteDish']['dish']['name'], 'item_2')
        self.assertEqual(Dish.objects.all().count(), 2)
class TestGetAutomationTaskFiles(unittest.TestCase):
    """Queries AutomationTask nodes whose S3 reads are mocked out."""

    def setUp(self):
        self.client = Client(root_schema)

    # Note order of calls must match those made to S3, and copy is used since
    # the object may be mutated (TODO should this be forbidden??)
    @mock.patch('graphql_api.data_s3.BaseS3Data._read_object',
                side_effect=[copy(AUTO_TASK), copy(FILE_REL0), copy(FILE0)])  # copy(FILE1),
    def test_query_with_files(self, mocked_api):
        """A node query should resolve the task plus its single file relation."""
        qry = '''
        query q1 {
            node(id:"UnVwdHVyZUdlbmVyYXRpb25UYXNrOjB6SEo0NTA=") {
                __typename
                ... on AutomationTask {
                    id
                    files {
                        total_count
                        edges {
                            node {
                                __typename
                                ... on FileRelation {
                                    role
                                    file {
                                        ... on InversionSolution { id }
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
        '''
        print(qry)
        executed = self.client.execute(qry)
        print(executed)
        result = executed['data']['node']
        # print("RESULT", result )
        assert result['id'] == 'QXV0b21hdGlvblRhc2s6MHpISjQ1MA=='
        assert result['files']['total_count'] == 1
        assert result['files']['edges'][0]['node']['file'][
            'id'] == 'SW52ZXJzaW9uU29sdXRpb246MC4wbXFjN2Y='
        assert mocked_api.call_count == 3  # this may break if caching or other optimisations are introduced

    @mock.patch('graphql_api.data_s3.BaseS3Data._read_object',
                side_effect=[copy(AUTO_TASK), copy(FILE_REL0), copy(FILE0), copy(AUTO_TASK), None])
    def test_task_product_query(self, mocked_api):
        """A nodes(id_in:) query should resolve the task's inversion solution."""
        qry = '''
        query q0 {
            nodes(id_in: ["UnVwdHVyZUdlbmVyYXRpb25UYXNrOjB6SEo0NTA="]) {
                ok
                result {
                    edges {
                        node {
                            __typename
                            ... on AutomationTask {
                                id
                                created
                                inversion_solution { id file_name }
                                files { total_count }
                            }
                        }
                    }
                }
            }
        }'''
        print(qry)
        executed = self.client.execute(qry)
        print(executed)
        node = executed['data']['nodes']['result']['edges'][0]['node']
        assert node['id'] == 'QXV0b21hdGlvblRhc2s6MHpISjQ1MA=='
        assert node['files']['total_count'] == 1
        assert node['inversion_solution'][
            'id'] == "SW52ZXJzaW9uU29sdXRpb246MC4wbXFjN2Y="
        assert node['inversion_solution']['file_name'] == "solution.zip"
        assert mocked_api.call_count == 3  # this may break if caching or other optimisations are introduced

    # @skip('as above')
    @mock.patch('graphql_api.data_s3.BaseS3Data._read_object',
                side_effect=[json.loads(ate.automation_task), json.loads(ate.file_rel), json.loads(ate.file)])
    def test_example_failing_product_query(self, mocked_api):
        """Same query as above against a different (regression) fixture set."""
        qry = '''
        query q0 {
            nodes(id_in: ["UnVwdHVyZUdlbmVyYXRpb25UYXNrOjB6SEo0NTA="]) {
                ok
                result {
                    edges {
                        node {
                            __typename
                            ... on AutomationTask {
                                id
                                created
                                inversion_solution { id file_name }
                                files { total_count }
                            }
                        }
                    }
                }
            }
        }'''
        print(qry)
        executed = self.client.execute(qry)
        print(executed)
        node = executed['data']['nodes']['result']['edges'][0]['node']
        assert node['id'] == 'QXV0b21hdGlvblRhc2s6ODQ5N0tOTEI='
        assert node['files']['total_count'] == 4
        assert node['inversion_solution'][
            'id'] == "SW52ZXJzaW9uU29sdXRpb246MTczMC4wa3BjS0s="
        assert node['inversion_solution'][
            'file_name'] == "NZSHM22_InversionSolution-QXV0b21hdGlvblRhc2s6ODQ5N0tOTEI=.zip"
        assert mocked_api.call_count == 3  # this may break if caching or other optimisations are introduced
def client():
    """Provide a graphene test client bound to the schema."""
    gql_client = Client(schema)
    return gql_client
def test_directory(log_dir):
    """Query the Directory node for a fixture path and print the result."""
    from ml_dash.config import Args
    Args.logdir = log_dir

    gql_client = Client(schema)
    query = """
        query AppQuery ($id: ID!) {
            directory ( id: $id ) {
                id name path
                readme { id name path text(stop:11) }
                dashConfigs(first:10) {
                    edges { node { id name path yaml text(stop:11) } }
                }
                charts(first:10) {
                    edges { node { id name dir path yaml text(stop:11) } }
                }
                directories (first:10) {
                    edges {
                        node {
                            id name path
                            directories (first:10) {
                                edges { node { id name } }
                            }
                        }
                    }
                }
                experiments (first:10) {
                    edges {
                        node {
                            id name path
                            parameters {keys flat}
                            files (first:10) {
                                edges { node { id, name} }
                            }
                        }
                    }
                }
            }
        }
    """
    path = "/episodeyang/cpc-belief/mdp"
    result = gql_client.execute(query, variables=dict(id=to_global_id("Directory", path)))
    # Fail loudly on resolver errors; otherwise dump the payload.
    if 'errors' in result:
        raise RuntimeError(result['errors'])
    print(">>")
    show(result['data'])
def setUp(self):
    """Create the test site and a graphene client for the grapple schema."""
    self.site = wagtail_factories.SiteFactory(
        hostname="grapple.localhost",
        site_name="Grapple test site",
    )
    self.client = Client(SCHEMA)
import graphene
from graphene.test import Client

from graphql_lib.post_queries import Query
from graphql_lib.post_mutations import Mutation
from posts.post_logic_router import PostLogic

# Shared logic object injected into every execution via context_value.
post_logic = PostLogic()
client = Client(schema=graphene.Schema(query=Query, mutation=Mutation),
                context_value={"post_logic": post_logic})


def test_create_post():
    """createPost should echo back the author supplied in the mutation."""
    post_logic.reset_posts()
    author = "me myself and i"
    query = """
        mutation createPost{createPost(post:{
            title:"sdffadfsadfsfasdssd",
            content:"ssfsdfs",
            published: true,
            author: \"""" + author + """\"
        }){
            author
        }}
    """
    result = client.execute(query)
    assert result['data']['createPost']['author'] == author


def test_get_post_list():
    # NOTE(review): this test is truncated in the visible source — the query
    # string below is unterminated here.
    query = """
def test_query_get_people_counter(self):
    """Snapshot the people counter for a single room."""
    response = Client(schema).execute(
        query_get_people_counter,
        variables={'room': 'room_a'},
    )
    self.assertMatchSnapshot(response)
def test_executions(self):
    """forcesExecutions over a date window should return both known executions
    with their metadata and vulnerability summaries."""
    project_name = 'unittesting'
    query = """
        query {
            forcesExecutions(
                projectName: "unittesting",
                fromDate: "2020-02-01T00:00:00Z",
                toDate: "2020-02-28T23:59:59Z"
            ) {
                executions {
                    projectName
                    identifier
                    date
                    exitCode
                    gitBranch
                    gitCommit
                    gitOrigin
                    gitRepo
                    kind
                    log
                    strictness
                    vulnerabilities {
                        exploits { kind who where }
                        mockedExploits { kind who where }
                        acceptedExploits { kind who where }
                        numOfVulnerabilitiesInExploits
                        numOfVulnerabilitiesInMockedExploits
                        numOfVulnerabilitiesInAcceptedExploits
                    }
                }
            }
        }
    """
    testing_client = Client(SCHEMA)
    result = self._get_result(query, testing_client)
    executions = result['data']['forcesExecutions']['executions']
    assert not result.get('errors')

    # First execution: the strict dynamic (DAST) run.
    assert executions[0]['projectName'] == project_name
    assert executions[0][
        'identifier'] == '33e5d863252940edbfb144ede56d56cf'
    assert executions[0]['date'] == '2020-02-19T19:31:18+00:00'
    assert executions[0]['exitCode'] == '1'
    assert executions[0]['gitBranch'] == 'master'
    assert executions[0][
        'gitCommit'] == '6e7b34c1358db2ff4123c3c76e7fe3bf9f2838f6'
    assert executions[0]['gitOrigin'] == 'http://test.com'
    assert executions[0]['gitRepo'] == 'Repository'
    assert executions[0]['kind'] == 'dynamic'
    assert len(executions[0]['log']) > 100
    assert isinstance(executions[0]['log'], str)
    assert executions[0]['strictness'] == 'strict'
    assert executions[0]['vulnerabilities'] == {
        "exploits": [{
            "kind": "DAST",
            "who": "https://test.com/test",
            "where": "HTTP/Implementation"
        }],
        "mockedExploits": [{
            "kind": "DAST",
            "who": "https://test.com/test",
            "where": "HTTP/Implementation"
        }],
        "acceptedExploits": [{
            "kind": "DAST",
            "who": "https://test.com/test/looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong",
            "where": "HTTP/Implementatioooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooon"
        }],
        "numOfVulnerabilitiesInExploits": 1,
        "numOfVulnerabilitiesInMockedExploits": 1,
        "numOfVulnerabilitiesInAcceptedExploits": 1
    }

    # Second execution: the static (SAST) run with only accepted exploits.
    assert executions[1]['projectName'] == project_name
    assert executions[1][
        'identifier'] == 'a125217504d447ada2b81da3e4bdab0e'
    assert executions[1]['date'] == '2020-02-19T19:04:33+00:00'
    assert executions[1]['exitCode'] == '0'
    assert executions[1]['gitBranch'] == 'awesomeFeature'
    assert executions[1][
        'gitCommit'] == '23c3c76e7fe3bf9f2838f66e7b34c1358db2ff41'
    assert executions[1]['gitOrigin'] == 'https://test.com/test'
    assert executions[1]['gitRepo'] == 'Repository'
    assert executions[1]['kind'] == 'static'
    assert len(executions[1]['log']) > 100
    assert isinstance(executions[1]['log'], str)
    assert executions[1]['strictness'] == 'strict'
    assert executions[1]['vulnerabilities'] == {
        "exploits": [],
        "mockedExploits": [],
        "acceptedExploits": [{
            "kind": "SAST",
            "who": "Repository/folder/folder/file.cs",
            "where": "55"
        }, {
            "kind": "SAST",
            "who": "Repository/folder/folder/file2.cs",
            "where": "3"
        }, {
            "kind": "SAST",
            "who": "Repository/folder/folder/file3.cs",
            "where": "8"
        }, {
            "kind": "SAST",
            "who": "Repository/folder/folder/file3.cs",
            "where": "9"
        }, {
            "kind": "SAST",
            "who": "Repository/folder/folder/file4.cs",
            "where": "10"
        }],
        "numOfVulnerabilitiesInExploits": 0,
        "numOfVulnerabilitiesInMockedExploits": 0,
        "numOfVulnerabilitiesInAcceptedExploits": 5
    }
def test_query_all_people_counter(self):
    """Snapshot the people counter across all rooms."""
    response = Client(schema).execute(query_all_people_counter)
    self.assertMatchSnapshot(response)
from graphene.test import Client

from ..data import setup
from ..schema import schema

# Populate the in-memory data store before any test runs.
setup()
client = Client(schema)


def test_mutations(snapshot):
    # NOTE(review): this test is truncated in the visible source — the query
    # string below is unterminated here.
    query = """
    mutation MyMutation {
        introduceShip(input:{clientMutationId:"abc", shipName: "Peter", factionId: "1"}) {
            ship {
                id
                name
            }
            faction {
                name
                ships {
                    edges {
                        node {
                            id
                            name
                        }
                    }
                }
            }
        }
    }
def setUp(self):
    """Attach a graphene test client for the root schema to each test."""
    self.client = Client(root_schema)
def graphql_client():
    """Provide a graphene test client for the GraphQL schema."""
    gql_client = Client(graphql_schema)
    return gql_client
def build_image_for_jupyterlab():
    """Fixture: build a Docker image for a populated labbook and yield test handles.

    Yields (labbook, image_builder, docker_client, docker_image_id,
    graphene_client, owner). Cleans up the container, image and working
    directory on teardown.
    """
    # Create temp dir
    config_file, temp_dir = _create_temp_work_dir()

    # Create user identity
    insert_cached_identity(temp_dir)

    # Create test client
    schema = graphene.Schema(query=LabbookQuery, mutation=LabbookMutations)

    # get environment data and index
    erm = RepositoryManager(config_file)
    erm.update_repositories()
    erm.index_repositories()

    with patch.object(Configuration, 'find_default_config', lambda self: config_file):
        # Load User identity into app context
        app = Flask("lmsrvlabbook")
        app.config["LABMGR_CONFIG"] = Configuration()
        app.config["LABMGR_ID_MGR"] = get_identity_manager(Configuration())

        with app.app_context():
            # within this block, current_app points to app. Set current user
            # explicitly (this is done in the middleware)
            flask.g.user_obj = app.config["LABMGR_ID_MGR"].get_user_profile()

            # Create a test client
            client = Client(
                schema,
                middleware=[DataloaderMiddleware(), error_middleware],
                context_value=ContextMock())

            # Create a labbook
            im = InventoryManager(config_file)
            lb = im.create_labbook('default', 'unittester', "containerunittestbook",
                                   description="Testing docker building.")
            cm = ComponentManager(lb)
            cm.add_base(ENV_UNIT_TEST_REPO, ENV_UNIT_TEST_BASE, ENV_UNIT_TEST_REV)
            cm.add_packages("pip3", [{"manager": "pip3",
                                      "package": "requests",
                                      "version": "2.18.4"}])

            ib = ImageBuilder(lb)
            ib.assemble_dockerfile(write=True)
            docker_client = get_docker_client()

            try:
                lb, docker_image_id = ContainerOperations.build_image(
                    labbook=lb, username="******")

                # Note: The final field is the owner
                yield lb, ib, docker_client, docker_image_id, client, "unittester"
            finally:
                # Best-effort cleanup. Bug fix: the bare `except:` clauses were
                # narrowed to `except Exception:` so KeyboardInterrupt/SystemExit
                # are no longer swallowed during teardown.
                try:
                    docker_client.containers.get(docker_image_id).stop()
                    docker_client.containers.get(docker_image_id).remove()
                except Exception:
                    pass

                try:
                    docker_client.images.remove(docker_image_id, force=True, noprune=False)
                except Exception:
                    pass

                shutil.rmtree(lb.root_dir)
def test_import_labbook(self, fixture_working_dir):
    """Test batch uploading, but not full import"""
    class DummyContext(object):
        # Minimal stand-in for the request context consumed by the mutation:
        # exposes the uploaded chunk under the key the loader middleware reads.
        def __init__(self, file_handle):
            self.labbook_loader = None
            self.files = {'uploadChunk': file_handle}

    client = Client(fixture_working_dir[3], middleware=[LabBookLoaderMiddleware()])

    # Create a temporary labbook
    lb = LabBook(fixture_working_dir[0])
    lb.new(owner={"username": "******"}, name="test-export", description="Tester")

    # Create a largeish file in the dir (~9 MB, forces multiple upload chunks)
    with open(os.path.join(fixture_working_dir[1], 'testfile.bin'), 'wb') as testfile:
        testfile.write(os.urandom(9000000))
    FileOperations.insert_file(lb, 'input', testfile.name)

    # Export labbook
    zip_file = export_labbook_as_zip(lb.root_dir, tempfile.gettempdir())
    lb_dir = lb.root_dir

    # Get upload params
    chunk_size = 4194304
    file_info = os.stat(zip_file)
    file_size = int(file_info.st_size / 1000)  # size in KB, as the API expects
    total_chunks = int(math.ceil(file_info.st_size / chunk_size))

    with open(zip_file, 'rb') as tf:
        for chunk_index in range(total_chunks):
            # Wrap each raw chunk in a FileStorage so it looks like a form upload.
            chunk = io.BytesIO()
            chunk.write(tf.read(chunk_size))
            chunk.seek(0)
            file = FileStorage(chunk)

            query = f"""
                        mutation myMutation{{
                          importLabbook(input:{{
                            chunkUploadParams:{{
                              uploadId: "jfdjfdjdisdjwdoijwlkfjd",
                              chunkSize: {chunk_size},
                              totalChunks: {total_chunks},
                              chunkIndex: {chunk_index},
                              fileSizeKb: {file_size},
                              filename: "{os.path.basename(zip_file)}"
                            }}
                          }}) {{
                            importJobKey
                            buildImageJobKey
                          }}
                        }}
                        """
            result = client.execute(query, context_value=DummyContext(file))
            assert "errors" not in result
            if chunk_index == total_chunks - 1:
                # Only the final chunk kicks off the background import/build
                # jobs, so job keys appear only on the last response.
                assert type(result['data']['importLabbook']['importJobKey']) == str
                assert type(result['data']['importLabbook']['buildImageJobKey']) == str
                assert "rq:job:" in result['data']['importLabbook']['importJobKey']
                assert "rq:job:" in result['data']['importLabbook']['buildImageJobKey']

            chunk.close()
def setUp(self):
    """Attach a graphene test client for the schema to each test."""
    self.client = Client(SCHEMA)
def test_getUpdates(self):
    """Testing getUpdates query."""
    # Two updates in the same release; up2 gets distinctive field values so
    # each filter combination below selects a predictable subset.
    release = base.BaseTestCaseMixin.create_release(self, version='22')
    self.create_update(build_nvrs=['TurboGears-2.1-1.el5'],
                       release_name=release.name)
    up2 = self.create_update(build_nvrs=['freetype-2.10.2-1.fc32'],
                             release_name=release.name)
    up2.alias = "FEDORA-2020-3223f9ec8b"
    up2.stable_days = 1
    up2.date_approved = datetime.datetime(2019, 10, 13, 16, 16, 22, 438484)
    self.db.commit()

    client = Client(schema)

    # Filter by stableDays + dateApproved: matches only up2.
    executed = client.execute("""{  getUpdates(stableDays: 1,
                              dateApproved: "2019-10-13 16:16:22.438484")
                              {  alias  request  unstableKarma  }}""")
    assert executed == {
        "data": {
            "getUpdates": [{
                "alias": "FEDORA-2020-3223f9ec8b",
                "request": "testing",
                "unstableKarma": -3
            }]
        }
    }

    # Broad filter: matches all pending updates.
    executed = client.execute("""{  getUpdates(stableKarma: 3, status: "pending",
                              critpath: false, pushed: false, request: "testing"){
                              stableDays  userId  }}""")
    assert executed == {
        'data': {
            'getUpdates': [{
                'stableDays': 0,
                'userId': 1
            }, {
                'stableDays': 0,
                'userId': 1
            }, {
                'stableDays': 1,
                'userId': 1
            }]
        }
    }

    # Filter by alias: matches only up2.
    executed = client.execute("""{  getUpdates(stableDays: 1, unstableKarma: -3,
                              alias: "FEDORA-2020-3223f9ec8b")
                              {  dateApproved  request  }}""")
    assert executed == {
        'data': {
            'getUpdates': [{
                'dateApproved': "2019-10-13 16:16:22.438484",
                'request': 'testing'
            }]
        }
    }

    # Filter by stableDays + userId: matches only up2.
    executed = client.execute("""{  getUpdates(critpath: false, stableDays: 1,
                              userId: 1){  request  unstableKarma  }}""")
    assert executed == {
        'data': {
            'getUpdates': [{
                'request': 'testing',
                'unstableKarma': -3,
            }]
        }
    }

    # Filter by release name: matches both updates.
    executed = client.execute("""{  getUpdates(releaseName: "F22")
                              {  request  }}""")
    assert executed == {
        'data': {
            'getUpdates': [{
                'request': 'testing',
            }, {
                'request': 'testing',
            }]
        }
    }
def setUp(self):
    """Attach a graphene test client for the schema to each test."""
    self.client = Client(schema)
def setUp(self):
    """Build the "server" schema and attach a graphene test client."""
    server_schema = create_schema("server")
    self.client = Client(server_schema)
class TestDjangoFormQuery:
    """Exercises DjangoFormQuery via a one-off schema with a form-backed field."""

    @pytest.fixture(autouse=True)
    def setup_schema(self):
        # Build a throwaway schema: one query backed by a Django form whose
        # single field caps input at 5 characters (drives the error test).
        class FormQueryForm(forms.Form):
            my_field = forms.CharField(max_length=5)

        class FormQuery(DjangoFormQuery):
            class Meta:
                form_class = FormQueryForm

            response = graphene.String()

            @classmethod
            def perform_query(cls, form, info):
                return cls(
                    response=
                    f"hello there from query {form.cleaned_data['my_field']}")

        class Queries(graphene.ObjectType):
            form_query = FormQuery.Field()

        self.schema = graphene.Schema(query=Queries)
        self.client = Client(self.schema)

    def request(self, input):
        # NOTE(review): `jsonify` here is indexed like a dict below, so it
        # appears to be a project helper rather than Flask's jsonify — confirm.
        response = jsonify(
            self.client.execute('''
                query MyQuery($input: FormQueryInput!) {
                    formQuery(input: $input) {
                        errors { field, extendedMessages { message, code } },
                        response
                    }
                }
            ''',
                                variables={'input': input},
                                context=create_fake_request()))
        return response['data']['formQuery']

    def test_it_works(self):
        # Valid input (<= 5 chars) yields a response and no errors.
        assert self.request({'myField': 'boop'}) == {
            'errors': [],
            'response': 'hello there from query boop'
        }

    def test_it_returns_errors(self):
        # 6-char input violates max_length=5 and surfaces the form error.
        assert self.request({'myField': 'boooop'}) == {
            'errors': [{
                'field': 'myField',
                'extendedMessages': [{
                    'code': 'max_length',
                    'message': 'Ensure this value has at most 5 characters (it has 6).'
                }]
            }],
            'response': None
        }