def test_cuda_version_property(self, mock_config_with_repo):
    """Test getting the cuda version.

    cuda_version is None for a fresh labbook, stays None after adding a base
    with no GPU support, and reflects the value once the base yaml on disk
    declares a cuda_version.
    """
    im = InventoryManager(mock_config_with_repo[0])
    lb = im.create_labbook('test', 'test', 'labbook1', description="my first labbook")
    assert lb.cuda_version is None

    # Add base without GPU support
    cm = ComponentManager(lb)
    cm.add_base(ENV_UNIT_TEST_REPO, ENV_UNIT_TEST_BASE, ENV_UNIT_TEST_REV)
    base_yaml_file = glob.glob(
        os.path.join(lb.root_dir, '.gigantum', 'env', 'base', '*.yaml'))[0]
    assert lb.cuda_version is None

    # Fake a version by editing the installed base yaml directly on disk
    with open(base_yaml_file, 'rt') as bf:
        base_data = yaml.safe_load(bf)
    base_data['cuda_version'] = '10.0'
    with open(base_yaml_file, 'wt') as bf:
        yaml.safe_dump(base_data, bf)
    assert lb.cuda_version == '10.0'
def mutate_and_get_payload(cls, root, info, name, description, repository,
                           base_id, revision, is_untracked=False,
                           client_mutation_id=None):
    """Create a new labbook for the logged-in user, record the creation
    activity, and attach the requested base.

    When ``is_untracked`` is True the labbook is created with LFS disabled
    and the input/output sections are marked untracked in Git.
    """
    username = get_logged_in_username()
    inv_manager = InventoryManager()
    if is_untracked:
        lb = inv_manager.create_labbook_disabled_lfs(
            username=username,
            owner=username,
            labbook_name=name,
            description=description,
            author=get_logged_in_author())
    else:
        lb = inv_manager.create_labbook(username=username, owner=username,
                                        labbook_name=name,
                                        description=description,
                                        author=get_logged_in_author())
    if is_untracked:
        FileOperations.set_untracked(lb, 'input')
        FileOperations.set_untracked(lb, 'output')
        # Verify the untracked setup actually took effect and left the repo
        # clean before any activity records are written.
        input_set = FileOperations.is_set_untracked(lb, 'input')
        output_set = FileOperations.is_set_untracked(lb, 'output')
        if not (input_set and output_set):
            raise ValueError(
                f'{str(lb)} untracking for input/output in malformed state'
            )
        if not lb.is_repo_clean:
            raise ValueError(
                f'{str(lb)} should have clean Git state after setting for untracked'
            )
    adr = ActivityDetailRecord(ActivityDetailType.LABBOOK, show=False, importance=0)
    adr.add_value('text/plain', f"Created new LabBook: {username}/{name}")

    # Create activity record
    ar = ActivityRecord(ActivityType.LABBOOK,
                        message=f"Created new LabBook: {username}/{name}",
                        show=True,
                        importance=255,
                        linked_commit=lb.git.commit_hash)
    ar.add_detail_object(adr)
    store = ActivityStore(lb)
    store.create_activity_record(ar)

    # Attach the base last, after the creation activity exists
    cm = ComponentManager(lb)
    cm.add_base(repository, base_id, revision)
    return CreateLabbook(labbook=Labbook(owner=username, name=lb.name))
def test_add_duplicate_base(self, mock_config_with_repo):
    """Adding the same base to a labbook twice must raise ValueError."""
    labbook = create_tmp_labbook(mock_config_with_repo[0])
    env_mgr = ComponentManager(labbook)

    # First add succeeds and writes the base yaml into the env directory
    env_mgr.add_base(gtmcore.fixtures.ENV_UNIT_TEST_REPO,
                     gtmcore.fixtures.ENV_UNIT_TEST_BASE,
                     gtmcore.fixtures.ENV_UNIT_TEST_REV)
    yaml_name = f"{gtmcore.fixtures.ENV_UNIT_TEST_REPO}_{gtmcore.fixtures.ENV_UNIT_TEST_BASE}.yaml"
    base_yaml_path = os.path.join(labbook.root_dir, '.gigantum', 'env', 'base', yaml_name)
    assert os.path.exists(base_yaml_path) is True

    # A second, identical add is rejected
    with pytest.raises(ValueError):
        env_mgr.add_base(gtmcore.fixtures.ENV_UNIT_TEST_REPO,
                         gtmcore.fixtures.ENV_UNIT_TEST_BASE,
                         gtmcore.fixtures.ENV_UNIT_TEST_REV)
def build_lb_image_for_env_conda(mock_config_with_repo):
    """A fixture that installs an old version of matplotlib and latest version
    of requests to increase code coverage.

    Yields (labbook, username) with a built image; tears down the labbook
    directory and image afterwards.
    """
    im = InventoryManager(mock_config_with_repo[0])
    lb = im.create_labbook('unittester', 'unittester', "containerunittestbookenvconda",
                           description="Testing environment functions.")
    cm = ComponentManager(lb)
    cm.add_base(ENV_UNIT_TEST_REPO, ENV_UNIT_TEST_BASE, ENV_UNIT_TEST_REV)
    cm.add_packages('conda3', [{'package': 'python-coveralls', 'version': '2.7.0'}])

    ib = ImageBuilder(lb)
    ib.assemble_dockerfile(write=True)
    client = get_docker_client()
    client.containers.prune()

    try:
        lb, docker_image_id = ContainerOperations.build_image(labbook=lb, username="******")
        yield lb, 'unittester'
    finally:
        shutil.rmtree(lb.root_dir)
        # Best-effort image cleanup. Fixed: a bare `except:` here would also
        # swallow KeyboardInterrupt/SystemExit during teardown.
        try:
            client.images.remove(docker_image_id, force=True, noprune=False)
        except Exception:
            pass
def test_bundled_app_lines(self, mock_labbook):
    """Test if the Dockerfile builds with bundled app ports"""
    lb = mock_labbook[2]
    bam = BundledAppManager(lb)
    # Register three bundled apps, each on its own port
    bam.add_bundled_app(8050, 'dash 1', 'a demo dash app 1', 'python app1.py')
    bam.add_bundled_app(9000, 'dash 2', 'a demo dash app 2', 'python app2.py')
    bam.add_bundled_app(9001, 'dash 3', 'a demo dash app 3', 'python app3.py')

    erm = RepositoryManager(mock_labbook[0])
    erm.update_repositories()
    erm.index_repositories()

    cm = ComponentManager(lb)
    cm.add_base(ENV_UNIT_TEST_REPO, ENV_UNIT_TEST_BASE, ENV_UNIT_TEST_REV)
    cm.add_packages("pip", [{
        "manager": "pip",
        "package": "requests",
        "version": "2.18.4"
    }])

    ib = ImageBuilder(lb)
    dockerfile_text = ib.assemble_dockerfile(write=False)

    # Every bundled app port must yield an EXPOSE line in the Dockerfile
    test_lines = [
        '# Bundled Application Ports', 'EXPOSE 8050', 'EXPOSE 9000',
        'EXPOSE 9001'
    ]
    docker_lines = dockerfile_text.split(os.linesep)
    for line in test_lines:
        assert line in docker_lines
def build_lb_image_for_env(mock_config_with_repo):
    """Fixture: create a labbook with the unit-test base and build its image.

    Yields (labbook, username); tears down the labbook directory and image.
    """
    # Create a labook
    im = InventoryManager(mock_config_with_repo[0])
    lb = im.create_labbook('unittester', 'unittester', "containerunittestbookenv",
                           description="Testing environment functions.")

    # Create Component Manager
    cm = ComponentManager(lb)
    # Add a component
    cm.add_base(ENV_UNIT_TEST_REPO, ENV_UNIT_TEST_BASE, ENV_UNIT_TEST_REV)

    ib = ImageBuilder(lb)
    ib.assemble_dockerfile(write=True)
    client = get_docker_client()
    client.containers.prune()

    try:
        lb, docker_image_id = ContainerOperations.build_image(labbook=lb, username="******")
        yield lb, 'unittester'
    finally:
        shutil.rmtree(lb.root_dir)
        # Remove image if it's still there. Fixed: a bare `except:` would also
        # swallow KeyboardInterrupt/SystemExit during teardown.
        try:
            client.images.remove(docker_image_id, force=True, noprune=False)
        except Exception:
            pass
def test_build_image_no_cache(self, fixture_working_dir_env_repo_scoped, reset_images):
    """Test building a labbook's image"""
    im = InventoryManager(fixture_working_dir_env_repo_scoped[0])
    lb = im.create_labbook("default", "default", "labbook-build2",
                           description="building an env")
    cm = ComponentManager(lb)
    cm.add_base(ENV_UNIT_TEST_REPO, "ut-busybox", 0)

    query = """
    {
        labbook(name: "labbook-build2", owner: "default") {
            environment {
                imageStatus
                containerStatus
            }
        }
    }
    """
    r = fixture_working_dir_env_repo_scoped[2].execute(query)
    assert 'errors' not in r
    # Nothing has been built yet
    assert r['data']['labbook']['environment']['imageStatus'] == 'DOES_NOT_EXIST'
    assert r['data']['labbook']['environment']['containerStatus'] == 'NOT_RUNNING'

    # Build the image
    build_query = """
    mutation myBuildImage {
        buildImage(input: {labbookName: "labbook-build2", owner: "default", noCache: true}) {
            environment {
                imageStatus
                containerStatus
            }
        }
    }
    """
    r = fixture_working_dir_env_repo_scoped[2].execute(build_query)
    assert 'errors' not in r
    assert r['data']['buildImage']['environment']['imageStatus'] in ['BUILD_QUEUED', 'BUILD_IN_PROGRESS']
    assert r['data']['buildImage']['environment']['containerStatus'] == 'NOT_RUNNING'

    # Wait for build to succeed for up to TIMEOUT_MAX seconds
    success = False
    for _ in range(TIMEOUT_MAX):
        result = fixture_working_dir_env_repo_scoped[2].execute(query)
        if result['data']['labbook']['environment']['imageStatus'] == 'EXISTS':
            success = True
            break
        # Any status other than in-progress (e.g. BUILD_FAILED) fails immediately
        assert result['data']['labbook']['environment']['imageStatus'] == 'BUILD_IN_PROGRESS'
        time.sleep(1)
    assert success is True, f"Failed to build within {TIMEOUT_MAX} second timeout."

    r = fixture_working_dir_env_repo_scoped[2].execute(query)
    assert 'errors' not in r
    assert r['data']['labbook']['environment']['imageStatus'] == 'EXISTS'
    assert r['data']['labbook']['environment']['containerStatus'] == 'NOT_RUNNING'
def test_try_configuring_two_bases(self, mock_config_with_repo):
    """A labbook may carry only one base: the second add_base must raise."""
    labbook = create_tmp_labbook(mock_config_with_repo[0])
    env_mgr = ComponentManager(labbook)
    env_mgr.add_base(gtmcore.fixtures.ENV_UNIT_TEST_REPO, "ut-jupyterlab-1", 0)
    with pytest.raises(ValueError):
        env_mgr.add_base(gtmcore.fixtures.ENV_UNIT_TEST_REPO, "ut-jupyterlab-2", 0)
def test_misconfigured_base_two_bases(self, mock_config_with_repo):
    """base_fields should raise when two bases are configured.

    NOTE(review): the updated add_base logic rejects a second base (see
    test_try_configuring_two_bases), so the second add_base call below likely
    raises ValueError itself and base_fields is never reached. This variant
    appears superseded by the version that writes a stray base yaml manually
    — confirm and remove or update.
    """
    lb = create_tmp_labbook(mock_config_with_repo[0])
    cm = ComponentManager(lb)  # mock_config_with_repo is a ComponentManager Instance
    cm.add_base(gtmcore.fixtures.ENV_UNIT_TEST_REPO, "ut-jupyterlab-1", 0)
    cm.add_base(gtmcore.fixtures.ENV_UNIT_TEST_REPO, "ut-jupyterlab-2", 0)
    with pytest.raises(ValueError):
        a = cm.base_fields
def test_cancel_build(self, fixture_working_dir_env_repo_scoped, reset_images):
    """Start a slow image build, cancel it, and verify BUILD_FAILED status."""
    im = InventoryManager(fixture_working_dir_env_repo_scoped[0])
    lb = im.create_labbook("default", "default", "labbook-build-cancel",
                           description="building an env")
    cm = ComponentManager(lb)
    cm.add_base(ENV_UNIT_TEST_REPO, "ut-busybox", 0)
    # The sleep keeps the build running long enough to be cancelled
    cm.add_docker_snippet('customdocker', ['RUN sleep 5'])

    # Build the image
    build_query = """
    mutation myBuildImage {
        buildImage(input: {
            labbookName: "labbook-build-cancel",
            owner: "default",
            noCache: true
        }) {
            environment {
                imageStatus
                containerStatus
            }
        }
    }
    """
    r = fixture_working_dir_env_repo_scoped[2].execute(build_query)
    time.sleep(1)
    assert 'errors' not in r
    assert r['data']['buildImage']['environment']['imageStatus'] == 'BUILD_IN_PROGRESS'

    cancel_query = """
    mutation myCancel {
        cancelBuild(input: {
            labbookName: "labbook-build-cancel",
            owner: "default"
        }) {
            buildStopped
            message
        }
    }"""
    cancel_r = fixture_working_dir_env_repo_scoped[2].execute(cancel_query)
    assert 'errors' not in cancel_r
    assert cancel_r['data']['cancelBuild']['buildStopped'] == True

    # A cancelled build is reported as failed
    check_query = """
    {
        labbook(name: "labbook-build-cancel", owner: "default") {
            environment {
                imageStatus
                containerStatus
            }
        }
    }
    """
    check_r = fixture_working_dir_env_repo_scoped[2].execute(check_query)
    assert 'errors' not in check_r
    assert check_r['data']['labbook']['environment']['imageStatus'] == 'BUILD_FAILED'
def test_build_and_start_and_stop_labbook_container(
        self, mock_config_file):
    """Build a labbook image synchronously via jobs, start its container,
    then stop it and verify the container is cleaned up."""
    erm = RepositoryManager(mock_config_file[0])
    erm.update_repositories()
    erm.index_repositories()

    # Create a labbook
    lb = InventoryManager(mock_config_file[0]).create_labbook(
        'unittester', 'unittester', 'unittest-start-stop-job',
        description="Testing docker building.")
    cm = ComponentManager(lb)
    cm.add_base(gtmcore.fixtures.ENV_UNIT_TEST_REPO, 'quickstart-jupyterlab', 2)
    ib = ImageBuilder(lb)
    ib.assemble_dockerfile(write=True)
    client = get_docker_client()
    img_list = client.images.list()

    # Best-effort removal of any image left over from a previous run
    try:
        from gtmcore.container.utils import infer_docker_image_name
        owner = InventoryManager().query_owner(lb)
        client.images.remove(
            infer_docker_image_name(labbook_name=lb.name,
                                    owner=owner,
                                    username='******'))
    except:
        pass

    docker_kwargs = {
        'path': lb.root_dir,
        'nocache': True,
        'username': '******'
    }
    image_id = jobs.build_labbook_image(**docker_kwargs)

    startc_kwargs = {
        'root': lb.root_dir,
        'config_path': lb.client_config.config_file,
        'username': '******'
    }
    # Start the docker container, and then wait till it's done.
    container_id = jobs.start_labbook_container(**startc_kwargs)
    assert get_docker_client().containers.get(
        container_id).status == 'running'

    # Stop the docker container, and wait until that is done.
    jobs.stop_labbook_container(container_id)
    with pytest.raises(Exception):
        # Should not be found because the stop job cleans up
        get_docker_client().containers.get(container_id)
def test_list_versions_from_fallback(self, mock_config_with_repo):
    """Test list_versions command"""
    username = "******"
    im = InventoryManager(mock_config_with_repo[0])
    lb = im.create_labbook(
        'unittest', 'unittest', 'labbook-unittest-01',
        description="From mock_config_from_repo fixture")

    # Create Component Manager
    cm = ComponentManager(lb)
    # Add a component
    cm.add_base(ENV_UNIT_TEST_REPO, ENV_UNIT_TEST_BASE, ENV_UNIT_TEST_REV)

    ib = ImageBuilder(lb)
    ib.assemble_dockerfile(write=True)
    client = get_docker_client()

    try:
        lb, docker_image_id = ContainerOperations.build_image(
            labbook=lb, username=username)

        # Test lookup with the image present
        mrg = PipPackageManager()
        result = mrg.search("peppercorn", lb, username)
        assert len(result) == 2
        result = mrg.search("gigantum", lb, username)
        assert len(result) == 4
        assert result[0] == "gigantum"

        # Delete image
        client.images.remove(docker_image_id, force=True, noprune=False)

        # Test lookup still works after the image is gone (fallback path)
        mrg = PipPackageManager()
        result = mrg.search("peppercorn", lb, username)
        assert len(result) == 2
        result = mrg.search("gigantum", lb, username)
        assert len(result) == 4
        assert result[0] == "gigantum"
    finally:
        shutil.rmtree(lb.root_dir)
        # Remove image if it's still there
        try:
            client.images.remove(docker_image_id, force=True, noprune=False)
        except:
            pass
def test_base_update_available(self, fixture_working_dir_env_repo_scoped, snapshot):
    """Test checking if the base is able to be updated"""
    im = InventoryManager(fixture_working_dir_env_repo_scoped[0])
    lb = im.create_labbook('default', 'default', 'labbook-base-test-update')
    cm = ComponentManager(lb)

    # Add an old base.
    cm.add_base(gtmcore.fixtures.ENV_UNIT_TEST_REPO, 'quickstart-jupyterlab', 1)

    query = """
    {
        labbook(owner: "default", name: "labbook-base-test-update") {
            name
            description
            environment {
                base{
                    id
                    revision
                }
                baseLatestRevision
            }
        }
    }
    """
    r = fixture_working_dir_env_repo_scoped[2].execute(query)
    assert 'errors' not in r
    # Installed revision is behind the latest available revision
    assert r['data']['labbook']['environment']['base']['revision'] == 1
    assert r['data']['labbook']['environment']['baseLatestRevision'] == 2

    # We upgrade our base to the latest
    cm.change_base(gtmcore.fixtures.ENV_UNIT_TEST_REPO, 'quickstart-jupyterlab', 2)

    r = fixture_working_dir_env_repo_scoped[2].execute(query)
    assert 'errors' not in r
    assert r['data']['labbook']['environment']['base']['revision'] == 2
    assert r['data']['labbook']['environment']['baseLatestRevision'] == 2

    # baseLatestRevision also resolves without querying the base itself
    query = """
    {
        labbook(owner: "default", name: "labbook-base-test-update") {
            name
            environment {
                baseLatestRevision
            }
        }
    }
    """
    r = fixture_working_dir_env_repo_scoped[2].execute(query)
    assert 'errors' not in r
    assert r['data']['labbook']['environment']['baseLatestRevision'] == 2
def build_lb_image_for_jupyterlab(mock_config_with_repo):
    """Fixture: build and start a jupyterlab-capable labbook container.

    Yields (lb, image_builder, docker_client, image_id, container_id, None,
    username); tears down the container, image, and labbook directory.
    """
    with patch.object(Configuration, 'find_default_config', lambda self: mock_config_with_repo[0]):
        im = InventoryManager(mock_config_with_repo[0])
        lb = im.create_labbook('unittester', 'unittester', "containerunittestbook")

        # Create Component Manager
        cm = ComponentManager(lb)
        # Add a component
        cm.add_base(ENV_UNIT_TEST_REPO, ENV_UNIT_TEST_BASE, ENV_UNIT_TEST_REV)
        cm.add_packages("pip", [{"manager": "pip", "package": "requests", "version": "2.18.4"}])

        ib = ImageBuilder(lb)
        docker_lines = ib.assemble_dockerfile(write=True)
        assert 'RUN pip install requests==2.18.4' in docker_lines
        # Guard against unpinned/None versions leaking into the Dockerfile
        assert all(['==None' not in l for l in docker_lines.split()])
        assert all(['=None' not in l for l in docker_lines.split()])
        client = get_docker_client()
        client.containers.prune()

        assert os.path.exists(os.path.join(lb.root_dir, '.gigantum', 'env', 'entrypoint.sh'))

        try:
            lb, docker_image_id = ContainerOperations.build_image(labbook=lb, username="******")
            lb, container_id = ContainerOperations.start_container(lb, username="******")
            assert isinstance(container_id, str)
            yield lb, ib, client, docker_image_id, container_id, None, 'unittester'

            try:
                _, s = ContainerOperations.stop_container(labbook=lb, username="******")
            except docker.errors.APIError:
                client.containers.get(container_id=container_id).stop(timeout=2)
                s = False
        finally:
            shutil.rmtree(lb.root_dir)
            # Stop and remove container if it's still there. Fixed: use
            # `except Exception` instead of bare `except:` so Ctrl-C during
            # teardown still propagates.
            try:
                client.containers.get(container_id=container_id).stop(timeout=2)
                client.containers.get(container_id=container_id).remove()
            except Exception:
                pass

            # Remove image if it's still there. Fixed: the original repeated
            # this exact images.remove() in a second identical try-block;
            # once is sufficient.
            try:
                ContainerOperations.delete_image(labbook=lb, username='******')
                client.images.remove(docker_image_id, force=True, noprune=False)
            except Exception:
                pass
def test_get_base(self, mock_config_with_repo):
    """base_fields exposes the installed base's metadata as a dict."""
    labbook = create_tmp_labbook(mock_config_with_repo[0])
    env_mgr = ComponentManager(labbook)
    env_mgr.add_base(gtmcore.fixtures.ENV_UNIT_TEST_REPO, "ut-jupyterlab-1", 0)

    fields = env_mgr.base_fields
    assert type(fields) == dict
    # Spot-check the key metadata fields recorded for the unit-test base
    expected = {'name': 'Unit Test1', 'os_class': 'ubuntu', 'schema': 1}
    for key, value in expected.items():
        assert fields[key] == value
def test_misconfigured_base_two_bases(self, mock_config_with_repo):
    """A stray second base yaml on disk makes base_fields raise ValueError."""
    labbook = create_tmp_labbook(mock_config_with_repo[0])
    env_mgr = ComponentManager(labbook)
    env_mgr.add_base(gtmcore.fixtures.ENV_UNIT_TEST_REPO, "ut-jupyterlab-1", 0)

    # Updated logic to enable changing bases won't allow `add_base` again, so
    # simulate the misconfiguration by dropping a bogus base yaml directly
    # into the env/base directory.
    rogue_base = Path(env_mgr.env_dir, 'base', 'evil_repo_quantum-deathray.yaml')
    rogue_base.write_text("I'm gonna break you!")

    with pytest.raises(ValueError):
        env_mgr.base_fields
def test_docker_snippet(self, mock_labbook):
    """Every custom docker snippet line must appear in the assembled Dockerfile."""
    lb = mock_labbook[2]
    erm = RepositoryManager(mock_labbook[0])
    erm.update_repositories()
    erm.index_repositories()

    cm = ComponentManager(lb)
    custom = ['RUN true', 'RUN touch /tmp/cat', 'RUN rm /tmp/cat']
    cm.add_base(ENV_UNIT_TEST_REPO, ENV_UNIT_TEST_BASE, ENV_UNIT_TEST_REV)
    cm.add_packages("pip", [{"manager": "pip", "package": "requests", "version": "2.18.4"}])
    cm.add_docker_snippet('test-docker', custom, description="Apostrophe's and wėįrd çhårāčtêrś")

    ib = ImageBuilder(lb)
    dockerfile = ib.assemble_dockerfile()
    # Fixed: the original asserted `all([any([i in l for i in custom]) for n
    # in custom])` — the outer loop variable `n` was unused, so it only
    # checked that ANY snippet was present (repeated len(custom) times).
    # Each snippet line must be present individually.
    assert all(snippet in dockerfile for snippet in custom)
def test_get_base_empty_error(self, mock_config_with_repo):
    """An empty base yaml file makes base_fields raise ValueError."""
    labbook = create_tmp_labbook(mock_config_with_repo[0])
    env_mgr = ComponentManager(labbook)
    env_mgr.add_base(gtmcore.fixtures.ENV_UNIT_TEST_REPO, "ut-jupyterlab-1", 0)

    # Truncate the installed base config to an empty yaml document
    base_path = os.path.join(env_mgr.env_dir, 'base',
                             "gigantum_base-images-testing_ut-jupyterlab-1.yaml")
    with open(base_path, 'wt') as cf:
        cf.write(yaml.safe_dump({}, default_flow_style=False))

    with pytest.raises(ValueError):
        env_mgr.base_fields
def test_get_component_list_base(self, mock_config_with_repo):
    """get_component_list('base') reports exactly the single installed base."""
    labbook = create_tmp_labbook(mock_config_with_repo[0])
    env_mgr = ComponentManager(labbook)
    env_mgr.add_base(gtmcore.fixtures.ENV_UNIT_TEST_REPO,
                     gtmcore.fixtures.ENV_UNIT_TEST_BASE,
                     gtmcore.fixtures.ENV_UNIT_TEST_REV)

    bases = env_mgr.get_component_list('base')
    assert len(bases) == 1
    only_base = bases[0]
    assert only_base['id'] == gtmcore.fixtures.ENV_UNIT_TEST_BASE
    assert only_base['revision'] == gtmcore.fixtures.ENV_UNIT_TEST_REV
def test_fail_import_export_zip(self, mock_config_with_repo):
    """Export/import must reject bad paths and an already-existing labbook.

    Fixed: the original wrapped each call in ``try: ...; assert False``
    followed by ``except Exception: pass`` — since AssertionError is a
    subclass of Exception, the ``assert False`` was swallowed and the test
    could never fail. ``pytest.raises`` expresses the intent correctly.
    """
    # Create new LabBook to be exported
    lb = InventoryManager(mock_config_with_repo[0]).create_labbook(
        'test', 'test', "lb-fail-export-import-test",
        description="Failing import-export.")
    cm = ComponentManager(lb)
    cm.add_base(gtmcore.fixtures.ENV_UNIT_TEST_REPO,
                gtmcore.fixtures.ENV_UNIT_TEST_BASE,
                gtmcore.fixtures.ENV_UNIT_TEST_REV)
    lb_root = lb.root_dir

    with tempfile.TemporaryDirectory() as temp_dir_path:
        export_dir = os.path.join(mock_config_with_repo[1], "export")

        # Exporting /tmp (not a labbook) should fail
        with pytest.raises(Exception):
            jobs.export_labbook_as_zip("/tmp", export_dir)

        # Export the labbook, then attempt imports that must fail
        exported_archive_path = jobs.export_labbook_as_zip(lb.root_dir, export_dir)

        # Should not be able to import because it already exists at lb_root
        with pytest.raises(Exception):
            jobs.import_labboook_from_zip(archive_path=exported_archive_path,
                                          username="******",
                                          owner="test",
                                          config_file=mock_config_with_repo[0])

        # Should not be able to import from a strange directory /t
        with pytest.raises(Exception):
            jobs.import_labboook_from_zip(archive_path="/t",
                                          username="******",
                                          owner="test",
                                          config_file=mock_config_with_repo[0])
def test_add_base(self, mock_config_with_repo): """Test adding a base to a labbook""" # Create a labook lb = create_tmp_labbook(mock_config_with_repo[0]) labbook_dir = lb.root_dir # Create Component Manager cm = ComponentManager(lb) # Add a component cm.add_base(gtmcore.fixtures.ENV_UNIT_TEST_REPO, gtmcore.fixtures.ENV_UNIT_TEST_BASE, gtmcore.fixtures.ENV_UNIT_TEST_REV) # Verify file component_file = os.path.join( labbook_dir, '.gigantum', 'env', 'base', f"{gtmcore.fixtures.ENV_UNIT_TEST_REPO}_" f"{gtmcore.fixtures.ENV_UNIT_TEST_BASE}.yaml") assert os.path.exists(component_file) is True with open(component_file, 'rt') as cf: data = yaml.safe_load(cf) preinstalled_pkgs = os.listdir( os.path.join(labbook_dir, ".gigantum/env/package_manager")) pkg_yaml_files = [n for n in preinstalled_pkgs if '.yaml' in n] assert len(pkg_yaml_files) == 7 for p in pkg_yaml_files: with open( os.path.join(labbook_dir, ".gigantum/env/package_manager", p)) as f: assert 'from_base: true' in f.read() assert data['id'] == gtmcore.fixtures.ENV_UNIT_TEST_BASE assert data['revision'] == gtmcore.fixtures.ENV_UNIT_TEST_REV # Verify git/activity log = lb.git.log() assert len(log) >= 4 assert "_GTM_ACTIVITY_START_" in log[0]["message"] assert 'Added base:' in log[0]["message"] assert "_GTM_ACTIVITY_START_" in log[2]["message"] assert 'Added 6 pip3 package(s)' in log[2]["message"] assert "_GTM_ACTIVITY_START_" in log[4]["message"] assert 'Added 1 apt package(s)' in log[4]["message"]
def test_build_docker_image(self, temporary_worker, mock_config_file):
    """Dispatch an image build to a background worker and poll until done."""
    w, d = temporary_worker
    erm = RepositoryManager(mock_config_file[0])
    erm.update_repositories()
    erm.index_repositories()

    im = InventoryManager(mock_config_file[0])
    lb = im.create_labbook('unittester', 'unittester',
                           'unittest-dispatcher-build-image',
                           description="Testing docker building.")
    cm = ComponentManager(lb)
    cm.add_base(gtmcore.fixtures.ENV_UNIT_TEST_REPO, 'ut-busybox', 0)

    ib = ImageBuilder(lb)
    ib.assemble_dockerfile(write=True)
    assert os.path.exists(
        os.path.join(lb.root_dir, '.gigantum', 'env', 'Dockerfile'))

    docker_kwargs = {'path': lb.root_dir, 'nocache': True}
    job_ref = d.dispatch_task(bg_jobs.build_labbook_image, kwargs=docker_kwargs)

    # Poll the dispatcher until the job reaches a terminal state; after a
    # couple of seconds the job must be publishing 'feedback' metadata
    elapsed_time = 0
    while True:
        status = d.query_task(job_ref).status
        print(status)
        r = d.query_task(job_ref)
        print(r.meta)
        if elapsed_time > 2:
            assert r.meta.get('feedback')
        if status in ['success', 'failed', 'finished']:
            print(r.exc_info)
            break
        if elapsed_time > 60:
            w.terminate()
            assert False, "timed out {}".format(status)
        elapsed_time = elapsed_time + 1
        time.sleep(1)

    w.terminate()

    res = d.query_task(job_ref)
    assert res
    assert res.status == 'finished'
def mutate_and_get_payload(cls, root, info, name, description, repository,
                           base_id, revision, is_untracked=False,
                           client_mutation_id=None):
    """Create a new labbook, record the creation activity, and attach the base.

    NOTE(review): ``is_untracked`` is accepted but never used here, while the
    sibling implementation creates an LFS-disabled labbook and marks the
    input/output sections untracked when it is True. Confirm whether that
    handling was intentionally omitted from this variant.
    """
    username = get_logged_in_username()
    inv_manager = InventoryManager()
    lb = inv_manager.create_labbook(username=username, owner=username,
                                    labbook_name=name,
                                    description=description,
                                    author=get_logged_in_author())
    adr = ActivityDetailRecord(ActivityDetailType.LABBOOK, show=False, importance=0)
    adr.add_value('text/plain', f"Created new LabBook: {username}/{name}")

    # Create activity record
    ar = ActivityRecord(ActivityType.LABBOOK,
                        message=f"Created new LabBook: {username}/{name}",
                        show=True,
                        importance=255,
                        linked_commit=lb.git.commit_hash)
    ar.add_detail_object(adr)
    store = ActivityStore(lb)
    store.create_activity_record(ar)

    # Attach the base after the creation activity exists
    cm = ComponentManager(lb)
    cm.add_base(repository, base_id, revision)
    return CreateLabbook(labbook=Labbook(owner=username, name=lb.name))
def test_package_query_with_errors_apt( self, snapshot, fixture_working_dir_env_repo_scoped): """Test querying for package info""" # Create labbook im = InventoryManager(fixture_working_dir_env_repo_scoped[0]) lb = im.create_labbook("default", "default", "labbook5apt", description="my first labbook10000") # Create Component Manager cm = ComponentManager(lb) # Add a component cm.add_base(ENV_UNIT_TEST_REPO, ENV_UNIT_TEST_BASE, ENV_UNIT_TEST_REV) query = """ { labbook(owner: "default", name: "labbook5apt"){ id checkPackages(packageInput: [ {manager: "apt", package: "curl", version:"8.1"}, {manager: "apt", package: "notarealpackage", version:""}]){ id manager package version latestVersion description isValid } } } """ snapshot.assert_match( fixture_working_dir_env_repo_scoped[2].execute(query))
def test_success_import_export_lbk(self, mock_config_with_repo):
    """Test legacy .lbk extension still works"""
    # Create new LabBook to be exported
    lb = InventoryManager(mock_config_with_repo[0]).create_labbook(
        'unittester', 'unittester', "unittest-lb-for-export-import-test-lbk",
        description="Testing import-export.")
    cm = ComponentManager(lb)
    cm.add_base(gtmcore.fixtures.ENV_UNIT_TEST_REPO,
                gtmcore.fixtures.ENV_UNIT_TEST_BASE,
                gtmcore.fixtures.ENV_UNIT_TEST_REV)
    ib = ImageBuilder(lb)
    ib.assemble_dockerfile()

    # Make sure the destination user exists locally
    working_dir = lb.client_config.config['git']['working_directory']
    os.makedirs(os.path.join(working_dir, 'unittester2', 'unittester2',
                             'labbooks'), exist_ok=True)

    lb_root = lb.root_dir
    with tempfile.TemporaryDirectory() as temp_dir_path:
        # Export the labbook
        export_dir = os.path.join(mock_config_with_repo[1], "export")
        exported_archive_path = jobs.export_labbook_as_zip(
            lb.root_dir, export_dir)
        tmp_archive_path = shutil.copy(exported_archive_path, '/tmp')

        # Give the archive the legacy .lbk extension
        lbk_archive_path = tmp_archive_path.replace(".zip", ".lbk")
        lbk_archive_path = shutil.copy(tmp_archive_path, lbk_archive_path)
        print(lbk_archive_path)

        # Delete the labbook
        shutil.rmtree(lb.root_dir)
        assert not os.path.exists(
            lb_root), f"LabBook at {lb_root} should not exist."
        assert os.path.exists(tmp_archive_path)

        # Now import the labbook as a new user, validating that the change of namespace works properly.
        imported_lb_path = jobs.import_labboook_from_zip(
            archive_path=lbk_archive_path,
            username='******',
            owner='unittester2',
            config_file=mock_config_with_repo[0])

        # The .lbk archive is consumed by the import
        assert not os.path.exists(lbk_archive_path)
        tmp_archive_path = shutil.copy(exported_archive_path, '/tmp')
        assert os.path.exists(tmp_archive_path)

        # New path should reflect username of new owner and user.
        assert imported_lb_path == lb_root.replace(
            '/unittester/unittester/', '/unittester2/unittester2/')
        import_lb = InventoryManager(
            mock_config_with_repo[0]).load_labbook_from_directory(
                imported_lb_path)

        # The imported labbook can rebuild its Dockerfile and has no remote
        ib = ImageBuilder(import_lb)
        ib.assemble_dockerfile(write=True)
        assert os.path.exists(
            os.path.join(imported_lb_path, '.gigantum', 'env', 'Dockerfile'))
        assert not import_lb.has_remote
def test_start_and_stop_docker_container(self, temporary_worker,
                                         mock_config_file):
    """Build a labbook image via the background dispatcher, then start and
    stop the resulting container."""
    # start_docker_container(docker_image_id, exposed_ports, volumes_dict) -> str:
    w, d = temporary_worker
    erm = RepositoryManager(mock_config_file[0])
    erm.update_repositories()
    erm.index_repositories()

    # Create a labbook
    lb = InventoryManager(mock_config_file[0]).create_labbook(
        'unittester', 'unittester', 'unittest-start-stop-job',
        description="Testing docker building.")
    cm = ComponentManager(lb)
    cm.add_base(gtmcore.fixtures.ENV_UNIT_TEST_REPO, 'quickstart-jupyterlab', 2)

    ib = ImageBuilder(lb)
    ib.assemble_dockerfile(write=True)
    docker_kwargs = {
        'path': lb.root_dir,
        'nocache': True,
        'username': '******'
    }
    client = get_docker_client()
    img_list = client.images.list()

    # Best-effort removal of any image left over from a previous run
    try:
        from gtmcore.container.utils import infer_docker_image_name
        owner = InventoryManager().query_owner(lb)
        client.images.remove(
            infer_docker_image_name(labbook_name=lb.name,
                                    owner=owner,
                                    username='******'))
    except:
        pass

    m = {'method': 'build_image', 'labbook': "unittest-start-stop-job"}
    job_ref = d.dispatch_task(bg_jobs.build_labbook_image,
                              kwargs=docker_kwargs, metadata=m)
    j = d.query_task(job_ref)
    assert hasattr(j, 'meta')
    assert j.meta.get('labbook') == "unittest-start-stop-job"

    # Poll until the build job reaches a terminal state (60s timeout)
    elapsed_time = 0
    while True:
        status = d.query_task(job_ref).status
        print(status)
        if status in ['success', 'failed', 'finished']:
            print(d.query_task(job_ref).exc_info)
            break
        if elapsed_time > 60:
            w.terminate()
            assert False, "timed out {}".format(status)
        elapsed_time = elapsed_time + 1
        time.sleep(1)

    res = d.query_task(job_ref)
    assert res
    print(res.status)
    assert res.status == 'finished'

    # Finish building image
    startc_kwargs = {
        'root': lb.root_dir,
        'config_path': lb.client_config.config_file,
        'username': '******'
    }
    # Start the docker container, and then wait till it's done.
    container_id = bg_jobs.start_labbook_container(**startc_kwargs)
    time.sleep(5)
    assert get_docker_client().containers.get(
        container_id).status == 'running'

    # Stop the docker container, and wait until that is done.
    print(container_id)
    bg_jobs.stop_labbook_container(container_id)
    w.terminate()
def test_export_and_import_lb(self, fixture_working_dir_env_repo_scoped):
    """Export a labbook through the GraphQL mutation, delete it, then re-import
    the archive via an HTTP upload to the live API server."""
    api_server_proc = multiprocessing.Process(target=service.main,
                                              kwargs={'debug': False})
    api_server_proc.daemon = True
    api_server_proc.start()
    assert api_server_proc.is_alive()
    time.sleep(5)
    assert api_server_proc.is_alive()

    # Make and validate request
    assert api_server_proc.is_alive()

    lb_name = "mutation-export-import-unittest"
    im = InventoryManager(fixture_working_dir_env_repo_scoped[0])
    lb = im.create_labbook("default", "default", lb_name,
                           description="Import/Export Mutation Testing.")
    cm = ComponentManager(lb)
    cm.add_base(ENV_UNIT_TEST_REPO, 'ut-busybox', 0)

    assert api_server_proc.is_alive()
    export_query = """
    mutation export {
        exportLabbook(input: {
            owner: "default",
            labbookName: "%s"
        }) {
            jobKey
        }
    }
    """ % lb.name
    r = fixture_working_dir_env_repo_scoped[2].execute(export_query)
    pprint.pprint(r)

    # Sleep while the background job completes, and then delete new lb.
    time.sleep(5)
    d = Dispatcher()
    job_status = d.query_task(JobKey(r['data']['exportLabbook']['jobKey']))

    # Delete existing labbook in file system.
    shutil.rmtree(lb.root_dir)
    assert api_server_proc.is_alive()
    assert job_status.status == 'finished'
    assert not os.path.exists(lb.root_dir)
    assert os.path.exists(job_status.result)
    pprint.pprint(job_status.result)

    # Move the archive to /tmp, replacing any stale copy from a prior run
    if os.path.exists(os.path.join('/tmp', os.path.basename(job_status.result))):
        os.remove(os.path.join('/tmp', os.path.basename(job_status.result)))
    new_path = shutil.move(job_status.result, '/tmp')

    # Now, import the labbook that was just exported.
    export_query = """
    mutation import {
        importLabbook(input: {
        }) {
            jobKey
        }
    }
    """
    files = {'uploadFile': open(new_path, 'rb')}
    qry = {"query": export_query}
    assert api_server_proc.is_alive()
    r = requests.post('http://localhost:10001/labbook/', data=qry, files=files)

    time.sleep(0.5)
    pprint.pprint(r)
    # NOTE(review): here `r` is a requests.Response, so `'errors' not in r`
    # compares the string against iterated raw content chunks (bytes) rather
    # than JSON keys — it is always True. Probably `r.json()` was intended;
    # confirm and fix.
    assert 'errors' not in r
    time.sleep(2)
def test_change_base(self, mock_labbook):
    """change_base is used both for updating versions and truly changing the base"""
    _, _, lb = mock_labbook

    # Base configuration matches `test_add_base` and package configuration
    # matches `test_add_package`, so their assertions are not repeated here.
    env_mgr = ComponentManager(lb)

    def _by_name(*names):
        # Snapshot of package-manager components restricted to `names`.
        return [pkg for pkg in env_mgr.get_component_list('package_manager')
                if pkg['package'] in names]

    # Deliberately mis-register packages as though a base provided them.
    # This shouldn't happen in practice, but change_base must cope: "gigantum"
    # is not in any base, while pandas *is* part of quickstart-jupyterlab —
    # just pinned here at a different version.
    fake_base_pkgs = [{"manager": "pip3", "package": "gigantum", "version": "0.5"},
                      {"package": "pandas", "version": "0.21"}]
    env_mgr.add_packages('pip3', fake_base_pkgs, force=True, from_base=True)

    registered = list(env_mgr.get_component_list('package_manager'))
    assert len(registered) == 2
    assert all(pkg['from_base'] for pkg in registered)

    env_mgr.add_base(gtmcore.fixtures.ENV_UNIT_TEST_REPO,
                     'quickstart-jupyterlab', 1)

    # Installing the base should leave exactly one matplotlib, at the base's version
    matplotlib_pkgs = _by_name('matplotlib')
    assert len(matplotlib_pkgs) == 1
    assert matplotlib_pkgs[0]['version'] == '2.1.1'

    # add_base() should have converted the fake base packages to user-installed.
    # Redundancy between fake and real base-installed pandas would yield 3 entries.
    converted = _by_name('gigantum', 'pandas')
    assert len(converted) == 2
    for pkg in converted:
        # Fake base-installed is now user ("not from_base") installed
        assert not pkg['from_base']
        if pkg['package'] == 'pandas':
            # The fake base-installed version is retained
            assert pkg['version'] == '0.21'

    user_pip_pkgs = [{"manager": "pip3", "package": "requests", "version": "2.18.2"},
                     # Overrides the already-installed matplotlib
                     {"manager": "pip3", "package": "matplotlib", "version": "2.2"}]
    env_mgr.add_packages('pip3', user_pip_pkgs, force=True)

    apt_pkgs = [{"manager": "apt", "package": "ack", "version": "1.0"},
                {"manager": "apt", "package": "docker", "version": "3.5"}]
    env_mgr.add_packages('apt', apt_pkgs)

    # The customized matplotlib (new relative to other tests) is the critical
    # input for exercising cm.change_base below.
    matplotlib_pkgs = _by_name('matplotlib')
    assert len(matplotlib_pkgs) == 1
    assert matplotlib_pkgs[0]['version'] == '2.2'

    # Upgrade the base revision
    env_mgr.change_base(gtmcore.fixtures.ENV_UNIT_TEST_REPO,
                        'quickstart-jupyterlab', 2)

    # The user's matplotlib override must survive the base change
    matplotlib_pkgs = _by_name('matplotlib')
    assert len(matplotlib_pkgs) == 1
    assert matplotlib_pkgs[0]['version'] == '2.2'

    # Base revision now 2?
    assert env_mgr.base_fields['revision'] == 2
def test_success_import_export_zip(self, mock_config_with_repo):
    """Export a labbook to a zip archive, then import it twice: once as a new
    owner (namespace change) and once as the original owner (self re-import).

    The order of archive copies/removals below is significant: the import job
    consumes (deletes) the archive, so it is re-copied to /tmp between the
    two imports.
    """
    # Create new LabBook to be exported
    im = InventoryManager(mock_config_with_repo[0])
    lb = im.create_labbook('unittester', 'unittester',
                           "unittest-lb-for-export-import-test",
                           description="Testing import-export.")
    cm = ComponentManager(lb)
    cm.add_base(gtmcore.fixtures.ENV_UNIT_TEST_REPO,
                gtmcore.fixtures.ENV_UNIT_TEST_BASE,
                gtmcore.fixtures.ENV_UNIT_TEST_REV)
    ib = ImageBuilder(lb)
    ib.assemble_dockerfile()

    # Make sure the destination user ("unittester2") exists locally so the
    # namespace-changing import has somewhere to land
    working_dir = lb.client_config.config['git']['working_directory']
    os.makedirs(os.path.join(working_dir, 'unittester2', 'unittester2',
                             'labbooks'), exist_ok=True)

    # Remember the original path — the labbook directory is deleted below
    lb_root = lb.root_dir
    with tempfile.TemporaryDirectory() as temp_dir_path:
        # Export the labbook
        export_dir = os.path.join(mock_config_with_repo[1], "export")
        exported_archive_path = jobs.export_labbook_as_zip(
            lb.root_dir, export_dir)
        tmp_archive_path = shutil.copy(exported_archive_path, '/tmp')

        # Delete the labbook so the import genuinely recreates it
        shutil.rmtree(lb.root_dir)
        assert not os.path.exists(
            lb_root), f"LabBook at {lb_root} should not exist."
        assert os.path.exists(tmp_archive_path)

        # Now import the labbook as a new user, validating that the change of
        # namespace works properly.
        imported_lb_path = jobs.import_labboook_from_zip(
            archive_path=tmp_archive_path,
            username='******',
            owner='unittester2',
            config_file=mock_config_with_repo[0])
        # The import job consumes the archive; re-copy it for the second import
        assert not os.path.exists(tmp_archive_path)
        tmp_archive_path = shutil.copy(exported_archive_path, '/tmp')
        assert os.path.exists(tmp_archive_path)

        # New path should reflect username of new owner and user.
        assert imported_lb_path == lb_root.replace(
            '/unittester/unittester/', '/unittester2/unittester2/')
        import_lb = InventoryManager(
            mock_config_with_repo[0]).load_labbook_from_directory(
                imported_lb_path)
        ib = ImageBuilder(import_lb)
        ib.assemble_dockerfile(write=True)
        assert os.path.exists(
            os.path.join(imported_lb_path, '.gigantum', 'env', 'Dockerfile'))
        assert not import_lb.has_remote

        # Repeat the above, except with the original user (e.g., re-importing
        # their own labbook)
        user_import_lb = jobs.import_labboook_from_zip(
            archive_path=tmp_archive_path,
            username="******",
            owner="unittester",
            config_file=mock_config_with_repo[0])
        assert not os.path.exists(tmp_archive_path)
        # New path should reflect username of new owner and user.
        assert user_import_lb
        import_lb2 = InventoryManager(
            mock_config_with_repo[0]).load_labbook_from_directory(
                user_import_lb)
        # After importing, the new user (in this case "cat") should be the
        # current, active workspace. And be created, if necessary.
        assert not import_lb2.has_remote

        # NOTE(review): builds from lb.root_dir — valid here only because the
        # original-owner re-import above restores the labbook at that exact
        # path; confirm if the import layout ever changes.
        build_kwargs = {
            'path': lb.root_dir,
            'username': '******',
            'nocache': True
        }
        docker_image_id = jobs.build_labbook_image(**build_kwargs)
        try:
            # Clean up the image built above; re-raise so failures are visible
            client = get_docker_client()
            client.images.remove(docker_image_id)
        except Exception as e:
            pprint.pprint(e)
            raise
def test_build_image(self, fixture_working_dir_env_repo_scoped, reset_images):
    """Test building a labbook's image.

    Creates a labbook, triggers the buildImage mutation, verifies the
    background job is visible, then polls until the image exists (bounded by
    TIMEOUT_MAX seconds).
    """
    im = InventoryManager(fixture_working_dir_env_repo_scoped[0])
    lb = im.create_labbook("default", "default", "labbook-build1",
                           description="building an env")
    cm = ComponentManager(lb)
    cm.add_base(ENV_UNIT_TEST_REPO, "ut-busybox", 0)

    client = fixture_working_dir_env_repo_scoped[2]

    query = """
    {
        labbook(name: "labbook-build1", owner: "default") {
            environment {
                imageStatus
                containerStatus
            }
        }
    }
    """
    r = client.execute(query)
    assert 'errors' not in r
    assert r['data']['labbook']['environment']['imageStatus'] == 'DOES_NOT_EXIST'
    assert r['data']['labbook']['environment']['containerStatus'] == 'NOT_RUNNING'

    # Build the image
    build_query = """
    mutation myBuildImage {
      buildImage(input: {
        labbookName: "labbook-build1",
        owner: "default"
      }) {
        environment {
          imageStatus
          containerStatus
        }
      }
    }
    """
    r = client.execute(build_query)
    # NOTE: module-level pprint import is used; the redundant local
    # `import pprint` has been removed.
    pprint.pprint(r)
    assert 'errors' not in r
    assert r['data']['buildImage']['environment']['imageStatus'] in ['BUILD_QUEUED', 'BUILD_IN_PROGRESS']
    assert r['data']['buildImage']['environment']['containerStatus'] == 'NOT_RUNNING'

    ## Sneak in a test for background jobs
    get_bg_jobs_query = """
    {
        labbook(name: "labbook-build1", owner: "default") {
            backgroundJobs {
                jobKey
                status
                failureMessage
                jobMetadata
                startedAt
                result
            }
        }
    }
    """
    r = client.execute(get_bg_jobs_query)
    assert 'errors' not in r, "There should be no errors when querying for background job status"
    assert r['data']['labbook']['backgroundJobs'][0]['status'], "Background Jobs status query should not be None"
    pprint.pprint(r)

    # Wait for build to succeed for up to TIMEOUT_MAX seconds
    success = False
    for _ in range(TIMEOUT_MAX):
        result = client.execute(query)
        image_status = result['data']['labbook']['environment']['imageStatus']
        if image_status == 'EXISTS':
            success = True
            break
        # BUG FIX: the build job may still be queued on early polls (the
        # mutation response above explicitly allows BUILD_QUEUED), but this
        # assertion previously accepted only BUILD_IN_PROGRESS, so a slow
        # worker start crashed the test instead of waiting.
        assert image_status in ['BUILD_QUEUED', 'BUILD_IN_PROGRESS'], \
            f"Unexpected imageStatus while polling: {image_status}"
        time.sleep(1)

    r = client.execute(get_bg_jobs_query)
    assert 'errors' not in r
    assert r['data']['labbook']['backgroundJobs'][0]['status'] == 'finished'
    # Job result is the docker image id (alphanumeric string)
    assert r['data']['labbook']['backgroundJobs'][0]['result'].isalnum()
    assert 'build_image' in r['data']['labbook']['backgroundJobs'][0]['jobMetadata']
    pprint.pprint(r)

    assert success is True, f"Failed to build within {TIMEOUT_MAX} second timeout."

    # Final state: image built, container not started
    r = client.execute(query)
    assert 'errors' not in r
    assert r['data']['labbook']['environment']['imageStatus'] == 'EXISTS'
    assert r['data']['labbook']['environment']['containerStatus'] == 'NOT_RUNNING'