def test_create_user_note_no_body(self, fixture_working_dir, snapshot):
    """Test creating and getting a user note"""
    # Create a labbook for the note to attach to
    lb = LabBook(fixture_working_dir[0])
    lb.new(owner={"username": "******"}, name="user-note-test",
           description="testing user notes")

    # Create a user note (title only, no body) and snapshot the response
    query = """
    mutation makeUserNote {
      createUserNote(input: {
        owner: "default",
        labbookName: "user-note-test",
        title: "I think this is a thing"
      }) {
        newActivityRecordEdge {
          node {
            message
            detailObjects {
              data
              type
              show
              importance
              tags
            }
            type
            show
            importance
            tags
          }
        }
      }
    }
    """
    snapshot.assert_match(fixture_working_dir[2].execute(query))
def mock_create_labbooks(fixture_working_dir):
    """Fixture: build a labbook with one inserted code file and a GraphQL test
    client, yield ``(labbook, client, schema)``, then clean up the temp dir.

    ``fixture_working_dir`` is a tuple: [0] config file path, [1] temp working
    directory, [2+] other fixture members — TODO confirm against the fixture.
    """
    # Create a labbook in the temporary directory
    config_file = fixture_working_dir[0]
    lb = LabBook(fixture_working_dir[0])
    lb.new(owner={"username": UT_USERNAME}, name=UT_LBNAME,
           description="Cats labbook 1")

    # Create a file in the dir and insert it into the labbook's code section
    with open(os.path.join(fixture_working_dir[1], 'unittest-examplefile'), 'w') as sf:
        sf.write("test data")
        sf.seek(0)
        FileOperations.insert_file(lb, 'code', sf.name)

    assert os.path.isfile(
        os.path.join(lb.root_dir, 'code', 'unittest-examplefile'))

    # Create test client bound to the temporary configuration
    schema = graphene.Schema(query=LabbookQuery, mutation=LabbookMutations)
    with patch.object(Configuration, 'find_default_config', lambda self: config_file):
        app = Flask("lmsrvlabbook")
        app.config["LABMGR_CONFIG"] = Configuration()
        app.config["LABMGR_ID_MGR"] = get_identity_manager(Configuration())
        with app.app_context():
            flask.g.user_obj = app.config["LABMGR_ID_MGR"].get_user_profile()
            client = Client(
                schema, middleware=[LabBookLoaderMiddleware(), error_middleware],
                context_value=ContextMock())
            yield lb, client, schema

    # BUG FIX: the original passed the whole fixture tuple to rmtree, which is
    # not a path, so the teardown never removed anything. Remove the temp
    # working directory itself.
    shutil.rmtree(fixture_working_dir[1], ignore_errors=True)
def mutate_and_get_payload(cls, root, info, owner, labbook_name, repository,
                           component_id, revision, client_mutation_id=None):
    """Add a custom environment component to a labbook and return the new edge."""
    username = get_logged_in_username()

    # Load the target labbook for the logged-in user
    labbook = LabBook(author=get_logged_in_author())
    labbook.from_name(username, owner, labbook_name)

    # Install the component; force=True overwrites an existing entry
    manager = ComponentManager(labbook)
    manager.add_component("custom", repository, component_id, revision,
                          force=True)

    # TODO: get cursor by checking how many packages are already installed
    node = CustomComponent(repository=repository, component_id=component_id,
                           revision=revision)
    edge = CustomComponentConnection.Edge(node=node, cursor=0)
    return AddCustomComponent(new_custom_component_edge=edge)
def test_publish_basic(self, fixture_working_dir, remote_bare_repo,
                       mock_create_labbooks_no_lfs):
    """Publish a labbook and verify the mutation reports success."""
    # Mock the request context so a fake authorization header is present
    env = EnvironBuilder(path='/labbook', method='POST',
                         headers={'Authorization': 'Bearer AJDFHASD'}).get_environ()
    fake_request = Request(environ=env)

    test_user_lb = LabBook(mock_create_labbooks_no_lfs[0])
    test_user_lb.from_name('default', 'default', 'labbook1')

    publish_query = f"""
    mutation c {{
      publishLabbook(input: {{
        labbookName: "labbook1",
        owner: "default"
      }}) {{
        success
      }}
    }}
    """

    result = mock_create_labbooks_no_lfs[2].execute(publish_query,
                                                    context_value=fake_request)
    assert 'errors' not in result
    assert result['data']['publishLabbook']['success'] is True
def mutate_and_get_payload(cls, root, info, owner, labbook_name,
                           client_mutation_id=None):
    """Dispatch a background job that exports the labbook as a zip archive.

    Returns the dispatcher job key so the client can poll for completion.
    """
    username = get_logged_in_username()
    logger.info(f'Exporting LabBook: {username}/{owner}/{labbook_name}')

    # Resolve the labbook directory under the configured working directory
    working_directory = Configuration().config['git']['working_directory']
    lb = LabBook(author=get_logged_in_author())
    lb.from_directory(os.path.join(working_directory, username, owner,
                                   'labbooks', labbook_name))

    # Hand the export off to a background worker
    dispatcher = Dispatcher()
    job_key = dispatcher.dispatch_task(
        jobs.export_labbook_as_zip,
        kwargs={'labbook_path': lb.root_dir,
                'lb_export_directory': os.path.join(working_directory, 'export')},
        metadata={'method': 'export_labbook_as_zip', 'labbook': lb.key})
    logger.info(
        f"Exporting LabBook {lb.root_dir} in background job with key {job_key.key_str}"
    )
    return ExportLabbook(job_key=job_key.key_str)
def mutate_and_get_payload(cls, root, info, owner, labbook_name,
                           client_mutation_id=None):
    """Publish a labbook to its remote using the caller's bearer token."""
    # Load LabBook from the inferred directory in the working dir
    username = get_logged_in_username()
    working_directory = Configuration().config['git']['working_directory']
    lb = LabBook(author=get_logged_in_author())
    lb.from_directory(os.path.join(working_directory, username, owner,
                                   'labbooks', labbook_name))

    # A valid bearer token is required to talk to the remote
    environ = info.context.headers.environ
    if "HTTP_AUTHORIZATION" not in environ:
        raise ValueError(
            "Authorization header not provided. Must have a valid session to query for collaborators"
        )
    token = parse_token(environ["HTTP_AUTHORIZATION"])

    # BVB -- Should this defer to `sync` if Labbook's remote is already set?
    # Otherwise, it will throw an exception, which may still be ok.
    wf = GitWorkflow(labbook=lb)
    wf.publish(username=username, access_token=token)
    return PublishLabbook(success=True)
def test_custom_counts(self, fixture_working_dir_env_repo_scoped, snapshot):
    """Test getting the a LabBook's package manager dependencies"""
    # Create a labbook and install three custom components
    lb = LabBook(fixture_working_dir_env_repo_scoped[0])
    lb.new(owner={"username": "******"}, name="labbook55",
           description="my first labbook10000")

    cm = ComponentManager(lb)
    for component in ("pillow", "noop-2", "noop-1"):
        cm.add_component("custom", ENV_UNIT_TEST_REPO, component, 0)

    # Snapshot the overview counts
    query = """
    {
      labbook(owner: "default", name: "labbook55") {
        overview {
          numAptPackages
          numConda2Packages
          numConda3Packages
          numPipPackages
          numCustomDependencies
        }
      }
    }
    """
    snapshot.assert_match(fixture_working_dir_env_repo_scoped[2].execute(query))
def mutate_and_get_payload(cls, root, info, owner, labbook_name, remote_url,
                           client_mutation_id=None):
    """Clone a labbook from a remote git URL into the local working directory."""
    username = get_logged_in_username()
    logger.info(f"Importing remote labbook from {remote_url}")
    lb = LabBook(author=get_logged_in_author())

    # TODO: Future work will look up remote in LabBook data, allowing user to select remote.
    git_config = lb.labmanager_config.config['git']
    default_remote = git_config['default_remote']
    admin_service = None
    for remote in git_config['remotes']:
        if default_remote == remote:
            admin_service = git_config['remotes'][remote]['admin_service']
            break

    # Extract valid Bearer token from the request context
    if hasattr(info.context, 'headers') \
            and "HTTP_AUTHORIZATION" in info.context.headers.environ:
        token = parse_token(info.context.headers.environ["HTTP_AUTHORIZATION"])
    else:
        raise ValueError(
            "Authorization header not provided. Must have a valid session to query for collaborators"
        )

    # Configure git credentials before cloning
    mgr = GitLabManager(default_remote, admin_service, token)
    mgr.configure_git_credentials(default_remote, username)

    lb.from_remote(remote_url, username, owner, labbook_name)
    return ImportRemoteLabbook(active_branch=lb.active_branch)
def test_add_package(self, fixture_working_dir_env_repo_scoped, snapshot):
    """Test listing labbooks"""
    lb = LabBook(fixture_working_dir_env_repo_scoped[0])
    lb.new(name="catbook-package-tester",
           description="LB to test package mutation",
           owner={"username": "******"})

    # Add a conda3 package via the mutation and snapshot the resulting edge
    pkg_query = """
    mutation myPkgMutation {
      addPackageComponents(input: {
        owner: "default",
        labbookName: "catbook-package-tester",
        packages: [{manager: "conda3", package: "requests", version: "2.18.4"}]
      }) {
        clientMutationId
        newPackageComponentEdges {
          node {
            id
            schema
            manager
            package
            version
            fromBase
          }
          cursor
        }
      }
    }
    """
    snapshot.assert_match(
        fixture_working_dir_env_repo_scoped[2].execute(pkg_query))
def test_rename_labbook(self, fixture_working_dir):
    """Test renaming a labbook"""
    # Create a dummy labbook to make sure directory structure is set up
    dummy = LabBook(fixture_working_dir[0])
    dummy.new(owner={"username": "******"}, name="dummy-lb",
              description="Tester dummy lb")

    # Unzip a prebuilt test labbook into the working directory
    test_zip_file = os.path.join(resource_filename('lmsrvlabbook', 'tests'),
                                 'data', 'test-labbook.zip')
    labbooks_dir = os.path.join(fixture_working_dir[1], 'default', 'default',
                                'labbooks')
    with ZipFile(test_zip_file) as zf:
        zf.extractall(labbooks_dir)

    original_dir = os.path.join(labbooks_dir, 'test-labbook')
    new_dir = os.path.join(labbooks_dir, 'test-new-name')

    # rename (without the container being previously built); currently the
    # mutation is expected to fail with a NotImplemented error
    query = f"""
    mutation myMutation{{
      renameLabbook(input:{{owner:"default", originalLabbookName: "test-labbook",
                            newLabbookName: "test-new-name"}}) {{
        success
      }}
    }}
    """
    r = fixture_working_dir[2].execute(query)
    assert r['data']['renameLabbook'] is None
    assert 'errors' in r
    assert 'NotImplemented' in r['errors'][0]['message']
def test_package_query(self, snapshot, fixture_working_dir_env_repo_scoped):
    """Test querying for package info"""
    # Create labbook to query against
    lb = LabBook(fixture_working_dir_env_repo_scoped[0])
    lb.new(owner={"username": "******"}, name="labbook6",
           description="my first labbook10000")

    # Query package validity for pinned and unpinned pip packages
    query = """
    {
      labbook(owner: "default", name: "labbook6") {
        id
        packages(packageInput: [
          {manager: "pip", package: "numpy", version: "1.14.2"},
          {manager: "pip", package: "scipy", version: ""}]) {
          id
          manager
          package
          version
          isValid
        }
      }
    }
    """
    snapshot.assert_match(fixture_working_dir_env_repo_scoped[2].execute(query))
def test_remove_custom_dep(self, fixture_working_dir_env_repo_scoped, snapshot):
    """Test removing a custom dependency"""
    lb = LabBook(fixture_working_dir_env_repo_scoped[0])
    labbook_dir = lb.new(name="labbook-remove-custom",
                         description="my first labbook",
                         owner={"username": "******"})
    client = fixture_working_dir_env_repo_scoped[2]

    # Add a custom dep
    add_query = """
    mutation myEnvMutation{
      addCustomComponent(input: {
        owner: "default",
        labbookName: "labbook-remove-custom",
        repository: "gig-dev_components2",
        componentId: "pillow",
        revision: 0
      }) {
        clientMutationId
        newCustomComponentEdge {
          node {
            repository
            componentId
            revision
            name
            description
          }
        }
      }
    }
    """
    snapshot.assert_match(client.execute(add_query))

    # Verify the component file was written to disk
    component_file = os.path.join(labbook_dir, '.gigantum', 'env', 'custom',
                                  "gig-dev_components2_pillow.yaml")
    assert os.path.exists(component_file) is True

    # Remove the custom dep and verify the file is gone
    remove_query = """
    mutation myEnvMutation{
      removeCustomComponent(input: {
        owner: "default",
        labbookName: "labbook-remove-custom",
        repository: "gig-dev_components2",
        componentId: "pillow"
      }) {
        clientMutationId
        success
      }
    }
    """
    snapshot.assert_match(client.execute(remove_query))
    assert os.path.exists(component_file) is False
def get_labbook_instance(key: str):
    """Load a LabBook from a composite loader key ``"<user>&<owner>&<name>"``."""
    # Split the key into its identifying parts
    username, owner_name, labbook_name = key.split('&')

    # Instantiate and load the labbook for the logged-in author
    labbook = LabBook(author=get_logged_in_author())
    labbook.from_name(username, owner_name, labbook_name)
    return labbook
def test_create_user_note_check_vals(self, fixture_working_dir, snapshot):
    """Test to make sure keys and IDs are getting set OK"""
    # Create labbook
    lb = LabBook(fixture_working_dir[0])
    lb.new(owner={"username": "******"}, name="user-note-test",
           description="testing user notes")

    # Create a user note with a body and tags
    query = """
    mutation makeUserNote {
      createUserNote(input: {
        owner: "default",
        labbookName: "user-note-test",
        title: "I think this is a thing",
        body: "##AND THIS IS A BODY\\n- asdggf\\n-asdf",
        tags: ["this", "and", "that"]
      }) {
        newActivityRecordEdge {
          node {
            message
            detailObjects {
              id
              key
              data
              type
              show
              importance
              tags
            }
            id
            commit
            linkedCommit
            type
            show
            importance
            tags
          }
          cursor
        }
      }
    }
    """
    result = fixture_working_dir[2].execute(query)

    # Hoist the deeply-nested record and its first detail object
    node = result['data']['createUserNote']['newActivityRecordEdge']['node']
    detail = node['detailObjects'][0]

    assert len(node['id']) > 10
    assert type(node['id']) == str
    assert len(node['commit']) == 40
    assert type(node['commit']) == str
    assert node['linkedCommit'] == "no-linked-commit"
    assert node['message'] == "I think this is a thing"
    assert len(detail['id']) > 10
    assert type(detail['id']) == str
    assert len(detail['key']) > 10
    assert type(detail['key']) == str
    assert "AND THIS IS A BODY" in detail['data'][0][1]
def mutate_and_get_payload(cls, root, info, owner, labbook_name, dev_tool,
                           container_override_id=None, client_mutation_id=None):
    """Start a dev tool inside the labbook container, registering a proxy
    route if one does not already exist, and return the URL path to reach it.

    Raises:
        ValueError: if more than one jupyter proxy route already targets
            this labbook's endpoint (ambiguous state).
    """
    username = get_logged_in_username()
    lb = LabBook(author=get_logged_in_author())
    lb.from_name(username, owner, labbook_name)

    lb_ip, _ = ContainerOperations.get_labbook_ip(lb, username)
    lb_port = 8888  # NOTE(review): assumes the dev tool listens on 8888 — confirm for non-jupyter tools
    lb_endpoint = f'http://{lb_ip}:{lb_port}'

    pr = confhttpproxy.ProxyRouter.get_proxy(
        lb.labmanager_config.config['proxy'])
    routes = pr.routes
    # Existing jupyter proxy routes that already target this labbook endpoint
    est_target = [
        k for k in routes.keys()
        if lb_endpoint in routes[k]['target'] and 'jupyter' in k
    ]
    if len(est_target) == 1:
        # Route already exists — reuse it rather than starting a new tool
        suffix = est_target[0]
    elif len(est_target) == 0:
        # No route yet: register one under a short random prefix
        rt_prefix = str(uuid.uuid4()).replace('-', '')[:8]
        rt_prefix, _ = pr.add(lb_endpoint, f'jupyter/{rt_prefix}')

        # Start jupyterlab
        _, suffix = ContainerOperations.start_dev_tool(
            lb, dev_tool_name=dev_tool, username=username,
            tag=container_override_id, proxy_prefix=rt_prefix)

        # Ensure we start monitor IFF jupyter isn't already running.
        start_labbook_monitor(lb, username, dev_tool,
                              url=f'{lb_endpoint}/{rt_prefix}',
                              author=get_logged_in_author())
    else:
        raise ValueError(f"Multiple Jupyter instances for {str(lb)}")

    # Don't include the port in the path if running on 80
    apparent_proxy_port = lb.labmanager_config.config['proxy'][
        "apparent_proxy_port"]
    if apparent_proxy_port == 80:
        path = suffix
    else:
        path = f':{apparent_proxy_port}{suffix}'

    return StartDevTool(path=path)
def test_sync_1(self, remote_bare_repo, mock_create_labbooks_no_lfs,
                mock_config_file):
    """Sync a labbook after a collaborator pushed a change upstream."""
    # Setup responses mock for this test
    responses.add(responses.GET, 'https://usersrv.gigantum.io/key',
                  json={'key': 'afaketoken'}, status=200)

    # Publish the default user's labbook
    test_user_lb = LabBook(mock_create_labbooks_no_lfs[0])
    test_user_lb.from_name('default', 'default', 'labbook1')
    test_user_wf = GitWorkflow(test_user_lb)
    test_user_wf.publish('default')

    # Mock the request context so a fake authorization header is present
    env = EnvironBuilder(path='/labbook', method='POST',
                         headers={'Authorization': 'Bearer AJDFHASD'}).get_environ()
    req = Request(environ=env)

    remote_url = test_user_lb.root_dir
    assert remote_url

    # Second user clones the labbook, adds a directory, and syncs upstream
    sally_lb = LabBook(mock_config_file[0])
    sally_lb.from_remote(remote_url, username="******", owner="default",
                         labbook_name="labbook1")
    sally_wf = GitWorkflow(sally_lb)
    assert sally_lb.active_branch == "gm.workspace-sally"
    sally_lb.makedir(relative_path='code/sally-dir', create_activity_record=True)
    sally_wf.sync('sally')

    # The default user's sync should now pull exactly one update
    sync_query = """
    mutation x {
      syncLabbook(input: {
        labbookName: "labbook1",
        owner: "default"
      }) {
        updateCount
        updatedLabbook {
          isRepoClean
        }
      }
    }
    """
    r = mock_create_labbooks_no_lfs[2].execute(sync_query, context_value=req)
    assert 'errors' not in r
    assert r['data']['syncLabbook']['updateCount'] == 1
    assert r['data']['syncLabbook']['updatedLabbook']['isRepoClean'] is True
    assert test_user_lb.active_branch == 'gm.workspace-default'
def test_detail_record_node(self, fixture_working_dir, snapshot, fixture_test_file):
    """Test getting an detail record by node ID"""
    lb = LabBook(fixture_working_dir[0])
    lb.new(owner={"username": "******"}, name="labbook1",
           description="my test description")
    FileOperations.insert_file(lb, "code", fixture_test_file)

    # First fetch all activity records along with their detail objects
    records_query = """
    {
      labbook(name: "labbook1", owner: "default") {
        activityRecords {
          edges {
            node {
              id
              commit
              linkedCommit
              message
              type
              show
              importance
              tags
              detailObjects {
                id
                key
                type
                data
                show
                importance
                tags
              }
            }
          }
        }
      }
    }
    """
    result1 = fixture_working_dir[2].execute(records_query)
    detail = result1['data']['labbook']['activityRecords']['edges'][0]['node']['detailObjects'][0]

    # Then re-fetch the first detail object directly by its node ID
    node_query = """
    {{
      node(id: "{}") {{
        ... on ActivityDetailObject {{
          id
          key
          type
          data
          show
          importance
          tags
        }}
      }}
    }}
    """.format(detail['id'])
    result2 = fixture_working_dir[2].execute(node_query)
    assert result2['data']['node'] == detail
def test_get_custom(self, fixture_working_dir_env_repo_scoped, snapshot):
    """Test getting the a LabBook's custom dependencies"""
    # Create labbook with no custom dependencies yet
    lb = LabBook(fixture_working_dir_env_repo_scoped[0])
    lb.new(owner={"username": "******"}, name="labbook3",
           description="my first labbook10000")
    client = fixture_working_dir_env_repo_scoped[2]

    query = """
    {
      labbook(owner: "default", name: "labbook3") {
        environment {
          customDependencies {
            edges {
              node {
                id
                schema
                componentId
                repository
                revision
                name
                description
                tags
                license
                url
                requiredPackageManagers
                dockerSnippet
              }
              cursor
            }
            pageInfo {
              hasNextPage
              hasPreviousPage
            }
          }
        }
      }
    }
    """
    # should be null
    snapshot.assert_match(client.execute(query))

    # Add a base image
    ComponentManager(lb).add_component("custom", ENV_UNIT_TEST_REPO, "pillow", 0)

    # Test again — the dependency should now appear
    r2 = client.execute(query)
    assert 'errors' not in r2
    snapshot.assert_match(r2)
def test_custom_docker_snippet_success(self, fixture_working_dir_env_repo_scoped):
    """Test adding a custom dependency"""
    lb = LabBook(fixture_working_dir_env_repo_scoped[0])
    lb.new(name="custom-docker-lb-unittest",
           description="Testing custom docker and stuff",
           owner={"username": "******"})
    client = fixture_working_dir_env_repo_scoped[2]

    # Add a custom docker snippet and confirm it round-trips
    add_query = """
    mutation addCustomDocker($labbook_name: String!, $owner: String!, $custom_docker: String!) {
      addCustomDocker(input: {
        owner: $owner,
        labbookName: $labbook_name,
        dockerContent: $custom_docker
      }) {
        updatedEnvironment {
          dockerSnippet
        }
      }
    }
    """
    add_vars = {'labbook_name': "custom-docker-lb-unittest",
                'owner': 'default',
                'custom_docker': "RUN true"}
    r = client.execute(add_query, variable_values=add_vars)
    assert 'errors' not in r
    assert r['data']['addCustomDocker']['updatedEnvironment']['dockerSnippet'] == "RUN true"

    # Remove the snippet and confirm it is cleared
    remove_query = """
    mutation removeCustomDocker($labbook_name: String!, $owner: String!) {
      removeCustomDocker(input: {
        owner: $owner,
        labbookName: $labbook_name
      }) {
        updatedEnvironment {
          dockerSnippet
        }
      }
    }
    """
    remove_vars = {'labbook_name': "custom-docker-lb-unittest",
                   'owner': 'default'}
    r = client.execute(remove_query, variable_values=remove_vars)
    assert 'errors' not in r
    assert r['data']['removeCustomDocker']['updatedEnvironment']['dockerSnippet'] == ""
def mock_create_labbooks(fixture_working_dir):
    """Fixture: create 'labbook1' with one code file, then yield the
    working-dir fixture tuple unchanged."""
    # Create a labbook in the temporary directory
    lb = LabBook(fixture_working_dir[0])
    lb.new(owner={"username": "******"}, name="labbook1",
           description="Cats labbook 1")

    # Create a file in the dir and insert it into the code section
    with open(os.path.join(fixture_working_dir[1], 'sillyfile'), 'w') as sf:
        sf.write("1234567")
        sf.seek(0)
        FileOperations.insert_file(lb, 'code', sf.name)

    assert os.path.isfile(os.path.join(lb.root_dir, 'code', 'sillyfile'))

    # name of the config file, temporary working directory, the schema
    yield fixture_working_dir
def mutate_and_process_upload(cls, info, owner, labbook_name, section,
                              file_path, chunk_upload_params, transaction_id,
                              client_mutation_id=None):
    """Finalize a chunked file upload by moving the assembled temp file into
    the labbook section, always cleaning up the temp file afterwards.

    Raises:
        ValueError: if no file was actually uploaded (no temp file path set).
    """
    if not cls.upload_file_path:
        logger.error('No file uploaded')
        raise ValueError('No file uploaded')

    try:
        username = get_logged_in_username()
        working_directory = Configuration().config['git'] \
            ['working_directory']
        inferred_lb_directory = os.path.join(working_directory, username,
                                             owner, 'labbooks', labbook_name)
        lb = LabBook(author=get_logged_in_author())
        lb.from_directory(inferred_lb_directory)
        dstpath = os.path.join(os.path.dirname(file_path), cls.filename)
        fops = FileOperations.put_file(labbook=lb,
                                       section=section,
                                       src_file=cls.upload_file_path,
                                       dst_path=dstpath,
                                       txid=transaction_id)
    finally:
        # Always remove the temp upload file, whether or not put_file succeeded
        try:
            logger.debug(f"Removing temp file {cls.upload_file_path}")
            os.remove(cls.upload_file_path)
        except FileNotFoundError:
            pass

    # Create data to populate edge
    create_data = {'owner': owner,
                   'name': labbook_name,
                   'section': section,
                   'key': fops['key'],
                   '_file_info': fops}

    # TODO: Fix cursor implementation..
    # this currently doesn't make sense when adding edges
    cursor = base64.b64encode(f"{0}".encode('utf-8'))

    return AddLabbookFile(new_labbook_file_edge=LabbookFileConnection.Edge(
        node=LabbookFile(**create_data), cursor=cursor))
def test_load_many(self, fixture_working_dir):
    """Test loading many labbooks"""
    # Create three labbooks across two owners
    lb = LabBook(fixture_working_dir[0])
    lb.new(owner={"username": "******"}, name="labbook1",
           description="my first labbook1")
    lb.new(owner={"username": "******"}, name="labbook2",
           description="my first labbook2")
    lb.new(username="******", owner={"username": "******"}, name="labbook2",
           description="my first labbook3")

    # Batch-load all three through the dataloader
    keys = ["default&default&labbook1", "default&default&labbook2",
            "default&test3&labbook2"]
    promise1 = LabBookLoader().load_many(keys)
    assert isinstance(promise1, Promise)

    lb_list = promise1.get()
    expected = [("labbook1", "my first labbook1"),
                ("labbook2", "my first labbook2"),
                ("labbook2", "my first labbook3")]
    for loaded, (name, description) in zip(lb_list, expected):
        assert loaded.name == name
        assert loaded.description == description
def mutate_and_get_payload(cls, root, info, owner, labbook_name,
                           client_mutation_id=None):
    """Remove the labbook's custom docker snippet from its environment."""
    username = get_logged_in_username()
    lb = LabBook(author=get_logged_in_author())
    lb.from_name(username, owner, labbook_name)

    # TODO - Should we check if a custom docker component already exists?
    cm = ComponentManager(lb)
    cm.remove_docker_snippet(cm.DEFAULT_CUSTOM_DOCKER_NAME)

    return RemoveCustomDocker(
        updated_environment=Environment(owner=owner, name=labbook_name))
def mutate_and_get_payload(cls, root, info, owner, labbook_name,
                           docker_content, client_mutation_id=None):
    """Attach a custom docker snippet to the labbook's environment."""
    username = get_logged_in_username()
    lb = LabBook(author=get_logged_in_author())
    lb.from_name(username, owner, labbook_name)

    # Drop blank lines before storing the snippet
    docker_lines = [line for line in docker_content.strip().split('\n') if line]
    cm = ComponentManager(lb)
    cm.add_docker_snippet(cm.DEFAULT_CUSTOM_DOCKER_NAME, docker_lines)

    return AddCustomDocker(
        updated_environment=Environment(owner=owner, name=labbook_name))
def resolve_is_local(self, info):
    """Return whether this labbook exists in the local working directory.

    Args:
        info: GraphQL resolve info (unused)

    Returns:
        bool: True when the labbook can be loaded locally, False otherwise
    """
    lb = LabBook()
    try:
        lb.from_name(get_logged_in_username(), self.owner, self.name)
    except ValueError:
        # from_name raises ValueError when the labbook cannot be found locally
        return False
    return True
def mutate_and_get_payload(cls, root, info, owner, labbook_name, confirm,
                           client_mutation_id=None):
    """Delete a labbook from the remote server.

    When ``confirm`` is True the remote repository is deleted and local
    references to it are cleaned up; otherwise this is a dry run and
    nothing is deleted.

    Raises:
        ValueError: if no bearer token is present or the admin service
            for the default remote cannot be found.
    """
    if confirm is True:
        # Load config data
        configuration = Configuration().config

        # Extract valid Bearer token
        token = None
        if hasattr(info.context.headers, 'environ'):
            if "HTTP_AUTHORIZATION" in info.context.headers.environ:
                token = parse_token(
                    info.context.headers.environ["HTTP_AUTHORIZATION"])
        if not token:
            raise ValueError(
                "Authorization header not provided. Cannot perform remote delete operation."
            )

        # Get remote server configuration
        default_remote = configuration['git']['default_remote']
        admin_service = None
        for remote in configuration['git']['remotes']:
            if default_remote == remote:
                admin_service = configuration['git']['remotes'][remote][
                    'admin_service']
                break
        if not admin_service:
            raise ValueError('admin_service could not be found')

        # Perform delete operation
        mgr = GitLabManager(default_remote, admin_service,
                            access_token=token)
        mgr.remove_labbook(owner, labbook_name)
        logger.info(
            f"Deleted {owner}/{labbook_name} from the remote repository {default_remote}"
        )

        # Remove locally any references to that cloud repo that's just been deleted.
        # Best-effort: the labbook may not exist locally, so only warn on failure.
        try:
            lb = LabBook()
            lb.from_name(get_logged_in_username(), owner, labbook_name)
            lb.remove_remote()
            lb.remove_lfs_remotes()
        except ValueError as e:
            logger.warning(e)

        return DeleteLabbook(success=True)
    else:
        logger.info(
            f"Dry run deleting {labbook_name} from remote repository -- not deleted."
        )
        return DeleteLabbook(success=False)
def mutate_and_get_payload(cls, root, info, owner, labbook_name, force=False,
                           client_mutation_id=None):
    """Sync the labbook with its remote and return the pulled update count.

    Raises:
        ValueError: if no bearer token is present or the admin service
            for the default remote cannot be found.
    """
    # Load LabBook
    username = get_logged_in_username()
    working_directory = Configuration().config['git']['working_directory']
    inferred_lb_directory = os.path.join(working_directory, username, owner,
                                         'labbooks', labbook_name)
    lb = LabBook(author=get_logged_in_author())
    lb.from_directory(inferred_lb_directory)

    # Extract valid Bearer token
    token = None
    if hasattr(info.context.headers, 'environ'):
        if "HTTP_AUTHORIZATION" in info.context.headers.environ:
            token = parse_token(
                info.context.headers.environ["HTTP_AUTHORIZATION"])
    if not token:
        raise ValueError(
            "Authorization header not provided. Must have a valid session to query for collaborators"
        )

    # Look up the admin service for the configured default remote
    default_remote = lb.labmanager_config.config['git']['default_remote']
    admin_service = None
    for remote in lb.labmanager_config.config['git']['remotes']:
        if default_remote == remote:
            admin_service = lb.labmanager_config.config['git']['remotes'][
                remote]['admin_service']
            break
    if not admin_service:
        raise ValueError('admin_service could not be found')

    # Configure git creds
    mgr = GitLabManager(default_remote, admin_service, access_token=token)
    mgr.configure_git_credentials(default_remote, username)

    wf = GitWorkflow(labbook=lb)
    cnt = wf.sync(username=username, force=force)

    # Create an updated graphene Labbook instance to return for convenience of Relay.
    updatedl = LabbookObject(owner=owner, name=labbook_name)
    return SyncLabbook(update_count=cnt, updated_labbook=updatedl)
def mutate_and_get_payload(cls, root, info, owner, labbook_name,
                           client_mutation_id=None):
    """Start the labbook's container and return its environment object."""
    username = get_logged_in_username()
    lb = LabBook(author=get_logged_in_author())
    lb.from_name(username, owner, labbook_name)

    lb, container_id, ports = ContainerOperations.start_container(
        labbook=lb, username=username)
    logger.info(
        f'Started new {lb} container ({container_id}) with ports {ports}')

    return StartContainer(
        environment=Environment(owner=owner, name=labbook_name))
def mutate_and_get_payload(cls, root, info, owner, labbook_name, section, key,
                           description=None, is_dir=False,
                           client_mutation_id=None):
    """Add a favorite for a file or directory in a labbook section and
    return the new favorite edge.

    Args:
        section: labbook section the key lives in (e.g. 'code')
        key: relative path of the file/dir being favorited
        description: optional human-readable description
        is_dir: True when favoriting a directory
    """
    username = get_logged_in_username()
    lb = LabBook(author=get_logged_in_author())
    lb.from_name(username, owner, labbook_name)

    # Add Favorite
    if is_dir:
        # (removed dead no-op `is_dir = is_dir` from the original)
        # Make sure trailing slashes are always present when favoriting a dir
        if key[-1] != "/":
            key = f"{key}/"

    new_favorite = lb.create_favorite(section, key, description=description,
                                      is_dir=is_dir)

    # Create data to populate edge
    create_data = {
        "id": f"{owner}&{labbook_name}&{section}&{key}",
        "owner": owner,
        "section": section,
        "name": labbook_name,
        "key": key,
        "index": new_favorite['index'],
        "_favorite_data": new_favorite
    }

    # Create cursor from the favorite's index
    cursor = base64.b64encode(
        f"{str(new_favorite['index'])}".encode('utf-8'))

    return AddLabbookFavorite(
        new_favorite_edge=LabbookFavoriteConnection.Edge(
            node=LabbookFavorite(**create_data), cursor=cursor))
def savehook(username, owner, labbook_name):
    """Jupyter save-hook endpoint: sweep uncommitted changes after a save.

    Validates the incoming jupyter token against the one stored in redis
    for this labbook before committing anything; responds 400 on any error.
    """
    try:
        redis_conn = redis.Redis(db=1)
        lb_key = '-'.join(
            ['gmlb', username, owner, labbook_name, 'jupyter-token'])

        changed_file = request.args.get('file')
        jupyter_token = request.args.get('jupyter_token')
        logger.info(
            f"Received save hook for {changed_file} in {username}/{owner}/{labbook_name}"
        )

        # The stored token must exist and match the incoming one
        stored = redis_conn.get(lb_key.encode())
        if stored is None:
            logger.error(f"Could not find redis key `{lb_key}`")
            abort(400)
        if stored.decode() != jupyter_token:
            raise ValueError("Incoming jupyter token must match key in Redis")

        lb = LabBook(author=get_logged_in_author())
        lb.from_name(username, owner, labbook_name)
        logger.info(f"Jupyter save hook saving {changed_file} from {str(lb)}")
        with lb.lock_labbook():
            lb.sweep_uncommitted_changes()
        return 'success'
    except Exception as err:
        logger.error(err)
        return abort(400)