def test_add_package(self, fixture_working_dir_env_repo_scoped, snapshot):
    """Test adding a conda3 package via the addPackageComponents mutation and snapshot the result."""
    im = InventoryManager(fixture_working_dir_env_repo_scoped[0])
    lb = im.create_labbook('default', 'default', 'catbook-package-tester',
                           description="LB to test package mutation")

    # Add a package and snapshot the returned edge fields
    pkg_query = """
    mutation myPkgMutation {
      addPackageComponents (input: {
        owner: "default",
        labbookName: "catbook-package-tester",
        packages: [{manager: "conda3", package: "python-coveralls", version: "2.9.1"}]
      }) {
        clientMutationId
        newPackageComponentEdges {
          node{
            id
            manager
            package
            version
            fromBase
          }
          cursor
        }
      }
    }
    """
    snapshot.assert_match(
        fixture_working_dir_env_repo_scoped[2].execute(pkg_query))
def test_pagination_sort_az_reverse(self, fixture_working_dir_dataset_populated_scoped, snapshot):
    """Snapshot the local dataset listing ordered by name in descending order."""
    list_query = """
    {
        datasetList{
            localDatasets(orderBy: "name", sort: "desc") {
                edges {
                    node {
                        id
                        name
                        description
                        datasetType{
                            storageType
                            name
                            description
                        }
                    }
                    cursor
                }
                pageInfo {
                    hasNextPage
                    hasPreviousPage
                }
            }
        }
    }
    """
    client = fixture_working_dir_dataset_populated_scoped[2]
    snapshot.assert_match(client.execute(list_query))
def test_pagination_sort_modified(self, fixture_working_dir_dataset_populated_scoped, snapshot):
    """Test sorting local datasets by modification time, before and after touching one dataset."""
    query = """
    {
        datasetList{
            localDatasets(orderBy: "modified_on", sort: "desc") {
                edges {
                    node {
                        id
                        name
                        description
                    }
                    cursor
                }
                pageInfo {
                    hasNextPage
                    hasPreviousPage
                }
            }
        }
    }
    """
    snapshot.assert_match(fixture_working_dir_dataset_populated_scoped[2].execute(query))

    # Write and commit a file in dataset4 so its modified time changes
    im = InventoryManager(fixture_working_dir_dataset_populated_scoped[0])
    ds = im.load_dataset("default", "default", "dataset4")
    with open(os.path.join(ds.root_dir, "test.txt"), 'wt') as tf:
        tf.write("asdfasdf")
    ds.git.add_all()
    ds.git.commit("Changing the repo")

    # Run query again
    snapshot.assert_match(fixture_working_dir_dataset_populated_scoped[2].execute(query))
def test_create_user_note_no_body(self, fixture_working_dir, snapshot): """Test creating and getting a user note""" # Create labbook lb = LabBook(fixture_working_dir[0]) lb.new(owner={"username": "******"}, name="user-note-test", description="testing user notes") # Create a user note query = """ mutation makeUserNote { createUserNote(input: { owner: "default", labbookName: "user-note-test", title: "I think this is a thing" }) { newActivityRecordEdge { node{ message detailObjects{ data type show importance tags } type show importance tags } } } } """ snapshot.assert_match(fixture_working_dir[2].execute(query))
def test_remove_user_identity(self, fixture_working_dir, snapshot):
    """Verify removeUserIdentity deletes the cached identity file from disk."""
    client = fixture_working_dir[2]
    cached_id_file = os.path.join(fixture_working_dir[1], '.labmanager',
                                  'identity', 'user.json')

    identity_query = """
    {
        userIdentity{
            id
            username
            email
            givenName
            familyName
        }
    }
    """
    snapshot.assert_match(client.execute(identity_query))
    # The identity file must exist before removal
    assert os.path.exists(cached_id_file) is True

    remove_mutation = """
    mutation myRemoveMutation {
        removeUserIdentity(input:{}){
            userIdentityEdge{
                username
            }
        }
    }
    """
    snapshot.assert_match(client.execute(remove_mutation))
    # ...and must be gone afterwards
    assert os.path.exists(cached_id_file) is False
def test_get_available_base_images(self, fixture_working_dir_env_repo_scoped, snapshot):
    """Snapshot every field of the available base image listing."""
    bases_query = """
    {
        availableBases{
            edges {
                node {
                    id
                    schema
                    componentId
                    name
                    description
                    readme
                    tags
                    icon
                    osClass
                    osRelease
                    license
                    url
                    languages
                    developmentTools
                    dockerImageServer
                    dockerImageNamespace
                    dockerImageRepository
                    dockerImageTag
                    packageManagers
                    installedPackages
                }
            }
        }
    }
    """
    client = fixture_working_dir_env_repo_scoped[2]
    snapshot.assert_match(client.execute(bases_query))
def test_get_base_image_by_node(self, fixture_working_dir_env_repo_scoped, snapshot):
    """Test fetching a single base image component directly via the node interface."""
    # The id is an opaque relay node ID for a BaseComponent
    query = """
    {
        node(id: "QmFzZUNvbXBvbmVudDpnaWdhbnR1bV9iYXNlLWltYWdlcy10ZXN0aW5nJnF1aWNrc3RhcnQtanVweXRlcmxhYiYx") {
            ... on BaseComponent {
                id
                componentId
                name
                description
                readme
                tags
                icon
                osClass
                osRelease
                license
                url
                languages
                developmentTools
                dockerImageServer
                dockerImageNamespace
                dockerImageRepository
                dockerImageTag
                packageManagers
            }
        }
    }
    """
    snapshot.assert_match(
        fixture_working_dir_env_repo_scoped[2].execute(query))
def test_get_available_custom_deps(self, fixture_working_dir_env_repo_scoped, snapshot):
    """Snapshot every field of the available custom dependency listing."""
    deps_query = """
    {
        availableCustomDependencies {
            edges {
                node {
                    id
                    schema
                    componentId
                    repository
                    revision
                    name
                    description
                    tags
                    license
                    url
                    osBaseClass
                    requiredPackageManagers
                    dockerSnippet
                }
            }
        }
    }
    """
    client = fixture_working_dir_env_repo_scoped[2]
    snapshot.assert_match(client.execute(deps_query))
def test_custom_counts(self, fixture_working_dir_env_repo_scoped, snapshot): """Test getting the a LabBook's package manager dependencies""" # Create labbook lb = LabBook(fixture_working_dir_env_repo_scoped[0]) lb.new(owner={"username": "******"}, name="labbook55", description="my first labbook10000") cm = ComponentManager(lb) # Add packages cm.add_component("custom", ENV_UNIT_TEST_REPO, "pillow", 0) cm.add_component("custom", ENV_UNIT_TEST_REPO, "noop-2", 0) cm.add_component("custom", ENV_UNIT_TEST_REPO, "noop-1", 0) query = """ { labbook(owner: "default", name: "labbook55") { overview { numAptPackages numConda2Packages numConda3Packages numPipPackages numCustomDependencies } } } """ snapshot.assert_match( fixture_working_dir_env_repo_scoped[2].execute(query))
def test_add_package(self, fixture_working_dir_env_repo_scoped, snapshot):
    """Test adding a conda3 package via the addPackageComponents mutation."""
    lb = LabBook(fixture_working_dir_env_repo_scoped[0])
    labbook_dir = lb.new(name="catbook-package-tester",
                         description="LB to test package mutation",
                         owner={"username": "******"})

    # Add a package and snapshot the returned edge fields
    pkg_query = """
    mutation myPkgMutation {
      addPackageComponents (input: {
        owner: "default",
        labbookName: "catbook-package-tester",
        packages: [{manager: "conda3", package: "requests", version: "2.18.4"}]
      }) {
        clientMutationId
        newPackageComponentEdges {
          node{
            id
            schema
            manager
            package
            version
            fromBase
          }
          cursor
        }
      }
    }
    """
    snapshot.assert_match(
        fixture_working_dir_env_repo_scoped[2].execute(pkg_query))
def test_get_available_custom_deps_pagination_reverse(
        self, fixture_working_dir_env_repo_scoped, snapshot):
    """Test getting the available custom dependencies using pagination from the end"""
    # Fetch only the last element
    query = """
    {
        availableCustomDependencies(last: 1) {
            edges {
                node {
                    id
                    componentId
                    repository
                    revision
                    name
                    description
                    tags
                    license
                    url
                    requiredPackageManagers
                    dockerSnippet
                }
                cursor
            }
            pageInfo {
                hasNextPage
                hasPreviousPage
            }
        }
    }
    """
    snapshot.assert_match(
        fixture_working_dir_env_repo_scoped[2].execute(query))

    # Page backwards from an explicit cursor
    query = """
    {
        availableCustomDependencies(last: 2, before: "MQ==") {
            edges {
                node {
                    id
                    componentId
                    repository
                    revision
                    name
                    description
                    tags
                    license
                    url
                    requiredPackageManagers
                    dockerSnippet
                }
                cursor
            }
            pageInfo {
                hasNextPage
                hasPreviousPage
            }
        }
    }
    """
    snapshot.assert_match(
        fixture_working_dir_env_repo_scoped[2].execute(query))
def test_readme(self, fixture_working_dir_env_repo_scoped, snapshot):
    """Snapshot the labbook readme field before and after writing readme content."""
    inventory = InventoryManager(fixture_working_dir_env_repo_scoped[0])
    labbook = inventory.create_labbook("default", "default", "labbook77",
                                       description="my first labbook10000")

    readme_query = """
    {
        labbook(owner: "default", name: "labbook77") {
            overview {
                readme
            }
        }
    }
    """
    client = fixture_working_dir_env_repo_scoped[2]

    # First snapshot: no readme has been written yet
    snapshot.assert_match(client.execute(readme_query))

    # Second snapshot: readme content now present
    labbook.write_readme("##Summary\nThis is my readme!!")
    snapshot.assert_match(client.execute(readme_query))
def test_list_remote_labbooks_modified(self, fixture_working_dir, snapshot):
    """Test listing remote labbooks ordered by modification date, in both sort directions."""
    # Mock the remote listing endpoints: the desc page returns the dummy data
    # reversed, the asc page returns it in natural order.
    responses.add(responses.GET,
                  'https://api.gigantum.com/read/projects?version=2&first=2&page=1&order_by=modified_on&sort=desc',
                  json=list(reversed(DUMMY_DATA)), status=200)
    responses.add(responses.GET,
                  'https://api.gigantum.com/read/projects?version=2&first=10&page=1&order_by=modified_on&sort=asc',
                  json=DUMMY_DATA, status=200)

    list_query = """
    {
        labbookList{
            remoteLabbooks(orderBy: "modified_on", sort: "desc", first: 2){
                edges{
                    node{
                        id
                        description
                        creationDateUtc
                        modifiedDateUtc
                        name
                        owner
                        isLocal
                    }
                    cursor
                }
                pageInfo{
                    hasNextPage
                }
            }
        }
    }"""
    r = fixture_working_dir[2].execute(list_query)
    assert 'errors' not in r
    snapshot.assert_match(r)

    list_query = """
    {
        labbookList{
            remoteLabbooks(orderBy: "modified_on", sort: "asc", first: 10){
                edges{
                    node{
                        id
                        description
                        creationDateUtc
                        modifiedDateUtc
                        name
                        owner
                    }
                    cursor
                }
                pageInfo{
                    hasNextPage
                }
            }
        }
    }"""
    r = fixture_working_dir[2].execute(list_query)
    assert 'errors' not in r
    snapshot.assert_match(r)
def test_empty_package_counts(self, fixture_working_dir_env_repo_scoped, snapshot): """Test getting the a LabBook's package manager dependencies""" # Create labbook im = InventoryManager(fixture_working_dir_env_repo_scoped[0]) lb = im.create_labbook("default", "default", "labbook4", description="my first labbook10000") query = """ { labbook(owner: "default", name: "labbook4") { overview { numAptPackages numConda2Packages numConda3Packages numPipPackages numCustomDependencies } } } """ snapshot.assert_match( fixture_working_dir_env_repo_scoped[2].execute(query))
def test_file_node(self, fixture_working_dir, snapshot):
    """Test resolving a LabbookFile directly via the node interface."""
    lb = LabBook(fixture_working_dir[0])
    lb.new(owner={"username": "******"}, name="labbook1",
           description="my first labbook1")

    # Setup some favorites in code
    with open(os.path.join(lb.root_dir, 'code', 'test1.txt'), 'wt') as test_file:
        test_file.write("blah1")

    # Create favorites
    lb.create_favorite("code", "test1.txt", description="My file with stuff 1")

    # The id is an opaque relay node ID for a LabbookFile
    query = """
    {
        node(id: "TGFiYm9va0ZpbGU6ZGVmYXVsdCZsYWJib29rMSZjb2RlJnRlc3QxLnR4dA==") {
            ... on LabbookFile {
                id
                key
                isDir
                size
            }
        }
    }
    """
    snapshot.assert_match(fixture_working_dir[2].execute(query))
def test_remove_custom_dep(self, fixture_working_dir_env_repo_scoped, snapshot):
    """Test removing a custom dependency"""
    lb = LabBook(fixture_working_dir_env_repo_scoped[0])
    labbook_dir = lb.new(name="labbook-remove-custom", description="my first labbook",
                         owner={"username": "******"})

    # Add a custom dep
    query = """
    mutation myEnvMutation{
        addCustomComponent(input: {
            owner: "default",
            labbookName: "labbook-remove-custom",
            repository: "gig-dev_components2",
            componentId: "pillow",
            revision: 0
        }) {
            clientMutationId
            newCustomComponentEdge {
                node{
                    repository
                    componentId
                    revision
                    name
                    description
                }
            }
        }
    }
    """
    snapshot.assert_match(
        fixture_working_dir_env_repo_scoped[2].execute(query))

    # Verify the component YAML file was written to the env directory
    component_file = os.path.join(labbook_dir, '.gigantum', 'env', 'custom',
                                  "gig-dev_components2_pillow.yaml")
    assert os.path.exists(component_file) is True

    # Remove a custom dep
    query = """
    mutation myEnvMutation{
        removeCustomComponent(input: {
            owner: "default",
            labbookName: "labbook-remove-custom",
            repository: "gig-dev_components2",
            componentId: "pillow"
        }) {
            clientMutationId
            success
        }
    }
    """
    snapshot.assert_match(
        fixture_working_dir_env_repo_scoped[2].execute(query))

    # Removal must also delete the YAML file
    assert os.path.exists(component_file) is False
def test_create_labbook(self, fixture_working_dir_env_repo_scoped, snapshot): """Test listing labbooks""" # Mock the configuration class it it returns the same mocked config file # Create LabBook query = """ mutation myCreateLabbook($name: String!, $desc: String!, $repository: String!, $base_id: String!, $revision: Int!) { createLabbook(input: {name: $name, description: $desc, repository: $repository, baseId: $base_id, revision: $revision}) { labbook { id name description } } } """ variables = { "name": "test-lab-book1", "desc": "my test description", "base_id": ENV_UNIT_TEST_BASE, "repository": ENV_UNIT_TEST_REPO, "revision": ENV_UNIT_TEST_REV } snapshot.assert_match(fixture_working_dir_env_repo_scoped[2].execute( query, variable_values=variables)) # Get LabBook you just created query = """ { labbook(name: "test-lab-book1", owner: "default") { activityRecords { edges{ node{ message type show importance tags username email detailObjects{ type data show importance tags } } } } } } """ snapshot.assert_match( fixture_working_dir_env_repo_scoped[2].execute(query))
def test_BatchCompiler_1(snapshot):
    """Compile the 'a' fixture directory into a temp dir and snapshot the output tree."""
    dir_tpl = os.path.join(FIXTURES_DIR, 'a')
    variables = {'a': 'AAA', 'b': 'BBB'}
    # TemporaryDirectory guarantees cleanup even if compile() or the snapshot
    # assertion raises, replacing the manual mkdtemp/try/finally/rmtree pattern.
    with tempfile.TemporaryDirectory() as dir_out:
        batch = jj2c.BatchCompiler(variables, dir_tpl, dir_out, extensions=[])
        batch.compile()
        snapshot.assert_match(collect_contents(dir_out))
def test_logged_in_user_invalid_token(self, fixture_working_dir, snapshot):
    """Snapshot session validity for the current identity (invalid-token case)."""
    session_query = """
    {
        userIdentity{
            isSessionValid
        }
    }
    """
    client = fixture_working_dir[2]
    snapshot.assert_match(client.execute(session_query))
def test_start_bundled_app(self, fixture_working_dir_env_repo_scoped, snapshot):
    """Test querying bundled apps and snapshotting a removeBundledApp mutation."""
    im = InventoryManager(fixture_working_dir_env_repo_scoped[0])
    lb = im.create_labbook('default', 'default', 'test-app-1', description="testing 1")
    bam = BundledAppManager(lb)
    bam.add_bundled_app(9999, "dash app 1", "my example bundled app 1", "echo test")

    lookup_query = """
    {
        labbook(owner: "default", name: "test-app-1"){
            id
            environment {
                id
                bundledApps{
                    id
                    appName
                    description
                    port
                    command
                }
            }
        }
    }
    """
    snapshot.assert_match(
        fixture_working_dir_env_repo_scoped[2].execute(lookup_query))

    # Remove a bundled app.
    # NOTE(review): the mutation targets "dash app 2" but only "dash app 1" was
    # added above — confirm this name mismatch is intentional (the snapshot
    # records whatever the server returns for this case).
    remove_query = """
    mutation startDevTool {
        removeBundledApp (input: {
            owner: "default",
            labbookName: "test-app-1",
            appName: "dash app 2"}) {
            clientMutationId
            environment{
                id
                bundledApps{
                    id
                    appName
                    description
                    port
                    command
                }
            }
        }
    }
    """
    snapshot.assert_match(
        fixture_working_dir_env_repo_scoped[2].execute(remove_query))
def test_delete_collaborator(self, mock_create_labbooks, property_mocks_fixture, snapshot, docker_socket_fixture): """Test deleting a collaborator from a LabBook""" # Setup REST mocks responses.add(responses.GET, 'https://repo.gigantum.io/api/v4/users?username=person100', json=[ { "id": 100, "name": "New Person", "username": "******", "state": "active", } ], status=200) responses.add(responses.DELETE, 'https://repo.gigantum.io/api/v4/projects/default%2Flabbook1/members/100', status=204) responses.add(responses.GET, 'https://repo.gigantum.io/api/v4/projects/default%2Flabbook1', json=[{ "id": 27, "description": "", }], status=200) responses.add(responses.GET, 'https://repo.gigantum.io/api/v4/projects/default%2Flabbook1/members', json=[ { "id": 29, "name": "Jane Doe", "username": "******", "access_level": 40, "expires_at": None } ], status=200) # Mock the request context so a fake authorization header is present builder = EnvironBuilder(path='/labbook', method='DELETE', headers={'Authorization': 'Bearer AJDFHASD'}) env = builder.get_environ() req = Request(environ=env) query = """ mutation DeleteCollaborator { deleteCollaborator( input: { owner: "default", labbookName: "labbook1", username: "******" }) { updatedLabbook { collaborators } } } """ snapshot.assert_match(mock_create_labbooks[2].execute(query, context_value=req))
def test_compile_dir_2_zip(snapshot):
    """Compile the 'a' fixture directory to a zip and snapshot the archive contents."""
    dir_tpl = os.path.join(FIXTURES_DIR, 'a')
    variables = {'a': 'AAA compile_dir_to_zip', 'b': 'BBB compile_dir_to_zip'}
    # TemporaryDirectory guarantees cleanup even on failure, replacing the
    # manual mkdtemp/try/finally/rmtree pattern.
    with tempfile.TemporaryDirectory() as dir_out:
        o_zip = os.path.join(dir_out, 'o.zip')
        jj2c.compile_dir_2_zip(dir_tpl, o_zip, variables, extensions=[])
        snapshot.assert_match(collect_contents_zip(o_zip))
def test_compile_zip_2_zip(snapshot):
    """Zip the 'a' fixture directory, compile that zip to an output zip, snapshot the result."""
    dir_tpl = os.path.join(FIXTURES_DIR, 'a')
    variables = {'a': 'AAA zip', 'b': 'BBB zip'}
    # TemporaryDirectory guarantees cleanup even on failure. The original also
    # leaked dir_out if make_archive raised before its try block — moving the
    # archive step inside the context manager fixes that.
    with tempfile.TemporaryDirectory() as dir_out:
        tpl_zip = os.path.join(dir_out, 'a.zip')
        o_zip = os.path.join(dir_out, 'o.zip')
        # make_archive appends '.zip' itself, hence the stripped base name
        shutil.make_archive(tpl_zip[:-4], 'zip', dir_tpl)
        jj2c.compile_zip_2_zip(tpl_zip, o_zip, variables, extensions=[])
        snapshot.assert_match(collect_contents_zip(o_zip))
def test_logged_in_user(self, fixture_working_dir, snapshot):
    """Snapshot the identity fields for the currently logged-in user."""
    identity_query = """
    {
        userIdentity{
            id
            username
            email
            givenName
            familyName
        }
    }
    """
    client = fixture_working_dir[2]
    snapshot.assert_match(client.execute(identity_query))
def test_get_dataset_all_fields(self, fixture_working_dir_dataset_populated_scoped, snapshot):
    """Test querying all dataset fields, with a separate check for timestamp formatting."""
    query = """{
                dataset(name: "dataset8", owner: "default") {
                    id
                    name
                    description
                    schemaVersion
                    activityRecords{
                        edges{
                            node{
                                message
                                type
                                show
                                importance
                                tags
                            }
                        }
                        pageInfo{
                            hasNextPage
                            hasPreviousPage
                        }
                    }
                    datasetType{
                        name
                        id
                        description
                        storageType
                        readme
                        tags
                        icon
                    }
                }
                }
            """
    snapshot.assert_match(fixture_working_dir_dataset_populated_scoped[2].execute(query))

    # Timestamps are asserted structurally rather than snapshotted, since their
    # exact values differ between test runs.
    query = """{
                dataset(name: "dataset8", owner: "default") {
                    name
                    createdOnUtc
                    modifiedOnUtc
                }
                }
            """
    result = fixture_working_dir_dataset_populated_scoped[2].execute(query)
    assert isinstance(result['data']['dataset']['createdOnUtc'], str) is True
    assert isinstance(result['data']['dataset']['modifiedOnUtc'], str) is True
    assert len(result['data']['dataset']['createdOnUtc']) > 10
    assert len(result['data']['dataset']['modifiedOnUtc']) > 10
def test_move_file(self, mock_create_labbooks, snapshot):
    """Test moving a file"""
    labbook_dir = os.path.join(mock_create_labbooks[1], 'default', 'default',
                               'labbooks', 'labbook1')
    os.makedirs(os.path.join(labbook_dir, 'code', 'subdir'))

    # Move a single file into a subdirectory
    query = """
    mutation MoveLabbookFile {
        moveLabbookFile(
            input: {
                owner: "default",
                labbookName: "labbook1",
                section: "code",
                srcPath: "sillyfile",
                dstPath: "subdir/sillyfile"
            }) {
            newLabbookFileEdge {
                node{
                    key
                    isDir
                    size
                }
            }
        }
    }
    """
    snapshot.assert_match(mock_create_labbooks[2].execute(query))

    # Move the whole subdirectory (paths use trailing slashes)
    query = """
    mutation MoveLabbookFile {
        moveLabbookFile(
            input: {
                owner: "default",
                labbookName: "labbook1",
                section: "code",
                srcPath: "subdir/",
                dstPath: "subdir2/"
            }) {
            newLabbookFileEdge {
                node{
                    key
                    isDir
                    size
                }
            }
        }
    }
    """
    snapshot.assert_match(mock_create_labbooks[2].execute(query))
    # The file must have moved along with its directory
    assert os.path.exists(os.path.join(labbook_dir, 'code', 'subdir2', 'sillyfile')) is True
def test_get_available_dataset_types(self, fixture_working_dir, snapshot):
    """Snapshot every field of the available dataset type listing."""
    types_query = """
    {
        availableDatasetTypes{
            id
            name
            description
            isManaged
            storageType
            readme
            tags
            icon
            url
        }
    }
    """
    client = fixture_working_dir[2]
    snapshot.assert_match(client.execute(types_query))
def test_favorites_node(self, fixture_working_dir, snapshot):
    """Test resolving LabbookFavorite objects via the node interface, including a bad ID."""
    lb = LabBook(fixture_working_dir[0])
    lb.new(owner={"username": "******"}, name="labbook1",
           description="my first labbook1")

    # Setup some favorites in code
    with open(os.path.join(lb.root_dir, 'code', 'test1.txt'), 'wt') as test_file:
        test_file.write("blah1")

    # Create favorites
    lb.create_favorite("code", "test1.txt", description="My file with stuff 1")

    # Test bad node that isn't a file
    query = """
    {
        node(id: "TGFiYm9va0Zhdm9yaXRlOmRlZmF1bHQmbGFiYm9vazEmY29kZSZ0ZXN0MzMzLnR4dA==") {
            ... on LabbookFavorite {
                id
                key
                description
                isDir
                index
            }
        }
    }
    """
    snapshot.assert_match(fixture_working_dir[2].execute(query))

    # Get the actual item
    query = """
    {
        node(id: "TGFiYm9va0Zhdm9yaXRlOmRlZmF1bHQmbGFiYm9vazEmY29kZSZ0ZXN0MS50eHQ=") {
            ... on LabbookFavorite {
                id
                key
                description
                isDir
                index
            }
        }
    }
    """
    snapshot.assert_match(fixture_working_dir[2].execute(query))
def test_create_dataset(self, fixture_working_dir, snapshot):
    """Test creating a dataset via mutation and querying it back by name."""
    query = """
    mutation myCreateDataset($name: String!, $desc: String!, $storage_type: String!) {
        createDataset(input: {name: $name, description: $desc,
                              storageType: $storage_type}) {
            dataset {
                id
                name
                description
                schemaVersion
                datasetType{
                    name
                    id
                    description
                }
            }
        }
    }
    """
    variables = {
        "name": "test-dataset-1",
        "desc": "my test dataset",
        "storage_type": "gigantum_object_v1"
    }
    snapshot.assert_match(fixture_working_dir[2].execute(
        query, variable_values=variables))

    # Get Dataset you just created
    query = """{
                dataset(name: "test-dataset-1", owner: "default") {
                    id
                    name
                    description
                    schemaVersion
                    datasetType{
                        name
                        id
                        description
                    }
                }
                }
            """
    snapshot.assert_match(fixture_working_dir[2].execute(query))
def test_move_dataset_file(self, fixture_working_dir, snapshot):
    """Test renaming a dataset file via the moveDatasetFile mutation."""
    im = InventoryManager(fixture_working_dir[0])
    ds = im.create_dataset('default', 'default', "dataset-move",
                           storage_type="gigantum_object_v1", description="testing move")
    m = Manifest(ds, 'default')

    revision = m.dataset_revision
    helper_append_file(m.cache_mgr.cache_root, revision, "test1.txt", "asdfasdghndfdf")
    m.sweep_all_changes()

    # Re-read revision after the sweep and confirm the file landed in the cache
    revision = m.dataset_revision
    cr = m.cache_mgr.cache_root
    assert os.path.exists(os.path.join(cr, revision, "test1.txt")) is True

    query = """
    mutation myMutation {
        moveDatasetFile(input: {datasetOwner: "default", datasetName: "dataset-move",
                                srcPath: "test1.txt", dstPath: "test1-renamed.txt"}) {
            updatedEdges {
                node {
                    id
                    key
                    isDir
                    isLocal
                    size
                }
            }
        }
    }
    """
    result = fixture_working_dir[2].execute(query)
    assert 'errors' not in result
    snapshot.assert_match(result)

    # After the mutation the current revision should only contain the new name
    revision = m.dataset_revision
    cr = m.cache_mgr.cache_root
    assert os.path.exists(os.path.join(cr, revision, "test1.txt")) is False
    assert os.path.exists(os.path.join(cr, revision, "test1-renamed.txt")) is True