def test_publish_basic(self, fixture_working_dir, remote_bare_repo, mock_create_labbooks_no_lfs):
    """The publishLabbook mutation should succeed for an existing local labbook."""
    # Mock the request context so a fake authorization header is present
    env_builder = EnvironBuilder(path='/labbook', method='POST',
                                 headers={'Authorization': 'Bearer AJDFHASD'})
    req = Request(environ=env_builder.get_environ())

    test_user_lb = LabBook(mock_create_labbooks_no_lfs[0])
    test_user_lb.from_name('default', 'default', 'labbook1')

    publish_query = f"""
    mutation c {{
      publishLabbook(input: {{
        labbookName: "labbook1",
        owner: "default"
      }}) {{
        success
      }}
    }}
    """

    result = mock_create_labbooks_no_lfs[2].execute(publish_query, context_value=req)
    assert 'errors' not in result
    assert result['data']['publishLabbook']['success'] is True
def test_delete_dir(self, mock_create_labbooks):
    """Deleting a directory via deleteLabbookFile removes it but keeps its section."""
    lb = LabBook(mock_create_labbooks[0])
    lb.from_name('default', 'default', 'labbook1')
    lb.makedir('code/subdir')
    lb.git.add_all('code/')
    lb.git.commit("blah")

    target_dir = os.path.join(lb.root_dir, 'code', 'subdir')
    assert os.path.exists(target_dir) is True

    # Note, deleting a file should work with and without a trailing / at the end.
    query = """
    mutation deleteLabbookFile {
      deleteLabbookFile(
        input: {
          owner: "default",
          labbookName: "labbook1",
          section: "code",
          filePath: "subdir/",
          isDirectory: true
        }) {
          success
        }
    }
    """
    result = mock_create_labbooks[2].execute(query)
    assert result['data']['deleteLabbookFile']['success'] is True
    assert os.path.exists(target_dir) is False
    assert os.path.exists(os.path.join(lb.root_dir, 'code')) is True
def mutate_and_get_payload(cls, root, info, owner, labbook_name, repository, component_id, revision, client_mutation_id=None):
    """Attach a custom environment component to the labbook and return its new edge."""
    lb = LabBook(author=get_logged_in_author())
    lb.from_name(get_logged_in_username(), owner, labbook_name)

    # Install the component through the environment component manager
    ComponentManager(lb).add_component("custom", repository, component_id, revision, force=True)

    # TODO: derive the cursor from how many components are already installed
    node = CustomComponent(repository=repository,
                           component_id=component_id,
                           revision=revision)
    new_edge = CustomComponentConnection.Edge(node=node, cursor=0)
    return AddCustomComponent(new_custom_component_edge=new_edge)
def savehook(username, owner, labbook_name):
    """Jupyter save-hook endpoint: validate the token, then commit labbook changes.

    Returns 'success' on a clean sweep; any failure is logged and answered with 400.
    """
    try:
        redis_conn = redis.Redis(db=1)
        lb_key = '-'.join(['gmlb', username, owner, labbook_name, 'jupyter-token'])

        changed_file = request.args.get('file')
        jupyter_token = request.args.get('jupyter_token')
        logger.info(f"Received save hook for {changed_file} in {username}/{owner}/{labbook_name}")

        stored_token = redis_conn.get(lb_key.encode())
        if stored_token is None:
            logger.error(f"Could not find redis key `{lb_key}`")
            abort(400)
        if stored_token.decode() != jupyter_token:
            raise ValueError("Incoming jupyter token must match key in Redis")

        lb = LabBook(author=get_logged_in_author())
        lb.from_name(username, owner, labbook_name)
        logger.info(f"Jupyter save hook saving {changed_file} from {str(lb)}")
        # Hold the labbook lock while sweeping uncommitted changes into git
        with lb.lock_labbook():
            lb.sweep_uncommitted_changes()
        return 'success'
    except Exception as err:
        # Web boundary: log and translate any failure into a 400 response
        logger.error(err)
        return abort(400)
def mutate_and_get_payload(cls, root, info, owner, labbook_name, description_content, client_mutation_id=None):
    """Set the labbook description, commit it, and record an activity entry."""
    username = get_logged_in_username()
    lb = LabBook(author=get_logged_in_author())
    lb.from_name(username, owner, labbook_name)
    lb.description = description_content

    with lb.lock_labbook():
        # Commit the updated labbook metadata file
        lb.git.add(os.path.join(lb.root_dir, '.gigantum/labbook.yaml'))
        commit = lb.git.commit('Updating description')

        # Create detail record
        detail = ActivityDetailRecord(ActivityDetailType.LABBOOK, show=False)
        detail.add_value('text/plain', "Updated description of LabBook")

        # Create activity record linked to the commit above
        record = ActivityRecord(ActivityType.LABBOOK,
                                message="Updated description of LabBook",
                                linked_commit=commit.hexsha,
                                tags=["labbook"],
                                show=False)
        record.add_detail_object(detail)

        # Store
        ActivityStore(lb).create_activity_record(record)

    return SetLabbookDescription(success=True)
def mutate_and_get_payload(cls, root, info, owner, labbook_name, confirm, client_mutation_id=None):
    """Delete a labbook from the remote server when confirm is True; dry-run otherwise.

    Raises:
        ValueError: if no bearer token is present or no admin service is configured.
    """
    # Guard clause: anything other than an explicit confirm=True is a dry run
    if confirm is not True:
        logger.info(f"Dry run deleting {labbook_name} from remote repository -- not deleted.")
        return DeleteLabbook(success=False)

    # Load config data
    configuration = Configuration().config

    # Extract valid Bearer token
    token = None
    if hasattr(info.context.headers, 'environ'):
        if "HTTP_AUTHORIZATION" in info.context.headers.environ:
            token = parse_token(info.context.headers.environ["HTTP_AUTHORIZATION"])
    if not token:
        raise ValueError("Authorization header not provided. Cannot perform remote delete operation.")

    # Get remote server configuration
    default_remote = configuration['git']['default_remote']
    admin_service = None
    for remote in configuration['git']['remotes']:
        if default_remote == remote:
            admin_service = configuration['git']['remotes'][remote]['admin_service']
            break
    if not admin_service:
        raise ValueError('admin_service could not be found')

    # Perform delete operation
    mgr = GitLabManager(default_remote, admin_service, access_token=token)
    mgr.remove_labbook(owner, labbook_name)
    logger.info(f"Deleted {owner}/{labbook_name} from the remote repository {default_remote}")

    # Remove locally any references to that cloud repo that's just been deleted.
    try:
        lb = LabBook()
        lb.from_name(get_logged_in_username(), owner, labbook_name)
        lb.remove_remote()
        lb.remove_lfs_remotes()
    except ValueError as e:
        # Best effort: the labbook may not exist locally
        logger.warning(e)

    return DeleteLabbook(success=True)
def get_labbook_instance(key: str):
    """Load a LabBook from a composite key of the form 'username&owner&labbook_name'."""
    username, owner_name, labbook_name = key.split('&')

    labbook = LabBook(author=get_logged_in_author())
    labbook.from_name(username, owner_name, labbook_name)
    return labbook
def mutate_and_get_payload(cls, root, info, owner, labbook_name, dev_tool, container_override_id=None, client_mutation_id=None):
    """Start a dev tool (e.g. Jupyter) for the labbook and return its proxied path.

    Raises:
        ValueError: if more than one matching Jupyter proxy route already exists.
    """
    username = get_logged_in_username()
    lb = LabBook(author=get_logged_in_author())
    lb.from_name(username, owner, labbook_name)

    lb_ip, _ = ContainerOperations.get_labbook_ip(lb, username)
    lb_port = 8888
    lb_endpoint = f'http://{lb_ip}:{lb_port}'

    # Look for an existing jupyter proxy route pointing at this container
    pr = confhttpproxy.ProxyRouter.get_proxy(lb.labmanager_config.config['proxy'])
    routes = pr.routes
    est_target = [k for k in routes.keys()
                  if lb_endpoint in routes[k]['target'] and 'jupyter' in k]

    if len(est_target) == 1:
        # Route already exists; reuse it
        suffix = est_target[0]
    elif len(est_target) == 0:
        # No route yet: register one, then start the dev tool behind it
        rt_prefix = str(uuid.uuid4()).replace('-', '')[:8]
        rt_prefix, _ = pr.add(lb_endpoint, f'jupyter/{rt_prefix}')

        # Start jupyterlab
        _, suffix = ContainerOperations.start_dev_tool(
            lb, dev_tool_name=dev_tool, username=username,
            tag=container_override_id, proxy_prefix=rt_prefix)

        # Ensure we start monitor IFF jupyter isn't already running.
        start_labbook_monitor(lb, username, dev_tool,
                              url=f'{lb_endpoint}/{rt_prefix}',
                              author=get_logged_in_author())
    else:
        raise ValueError(f"Multiple Jupyter instances for {str(lb)}")

    # Don't include the port in the path if running on 80
    apparent_proxy_port = lb.labmanager_config.config['proxy']["apparent_proxy_port"]
    path = suffix if apparent_proxy_port == 80 else f':{apparent_proxy_port}{suffix}'
    return StartDevTool(path=path)
def test_sync_1(self, remote_bare_repo, mock_create_labbooks_no_lfs, mock_config_file):
    """After a collaborator pushes one change, syncLabbook should pull exactly one update."""
    # Setup responses mock for this test
    responses.add(responses.GET, 'https://usersrv.gigantum.io/key',
                  json={'key': 'afaketoken'}, status=200)

    test_user_lb = LabBook(mock_create_labbooks_no_lfs[0])
    test_user_lb.from_name('default', 'default', 'labbook1')
    test_user_wf = GitWorkflow(test_user_lb)
    test_user_wf.publish('default')

    # Mock the request context so a fake authorization header is present
    env_builder = EnvironBuilder(path='/labbook', method='POST',
                                 headers={'Authorization': 'Bearer AJDFHASD'})
    req = Request(environ=env_builder.get_environ())

    remote_url = test_user_lb.root_dir
    assert remote_url

    # Second user clones the repo and pushes a change
    sally_lb = LabBook(mock_config_file[0])
    sally_lb.from_remote(remote_url, username="******", owner="default", labbook_name="labbook1")
    sally_wf = GitWorkflow(sally_lb)
    assert sally_lb.active_branch == "gm.workspace-sally"
    sally_lb.makedir(relative_path='code/sally-dir', create_activity_record=True)
    sally_wf.sync('sally')

    sync_query = """
    mutation x {
      syncLabbook(input: {
        labbookName: "labbook1",
        owner: "default"
      }) {
        updateCount
        updatedLabbook {
          isRepoClean
        }
      }
    }
    """
    result = mock_create_labbooks_no_lfs[2].execute(sync_query, context_value=req)
    assert 'errors' not in result
    assert result['data']['syncLabbook']['updateCount'] == 1
    assert result['data']['syncLabbook']['updatedLabbook']['isRepoClean'] is True
    assert test_user_lb.active_branch == 'gm.workspace-default'
def mutate_and_get_payload(cls, root, info, owner, labbook_name, remote_name, remote_url, client_mutation_id=None):
    """Register a new git remote on the labbook."""
    logger.info(f"Adding labbook remote {remote_name} {remote_url}")

    labbook = LabBook(author=get_logged_in_author())
    labbook.from_name(get_logged_in_username(), owner, labbook_name)
    labbook.add_remote(remote_name, remote_url)

    return AddLabbookRemote(success=True)
def mutate_and_get_payload(cls, root, info, owner, labbook_name, client_mutation_id=None):
    """Remove the user's custom Docker snippet from the labbook environment."""
    lb = LabBook(author=get_logged_in_author())
    lb.from_name(get_logged_in_username(), owner, labbook_name)

    # TODO - Should we check whether a custom docker snippet actually exists first?
    cm = ComponentManager(lb)
    cm.remove_docker_snippet(cm.DEFAULT_CUSTOM_DOCKER_NAME)

    return RemoveCustomDocker(
        updated_environment=Environment(owner=owner, name=labbook_name))
def mutate_and_get_payload(cls, root, info, owner, labbook_name, docker_content, client_mutation_id=None):
    """Store a user-supplied custom Docker snippet in the labbook environment."""
    lb = LabBook(author=get_logged_in_author())
    lb.from_name(get_logged_in_username(), owner, labbook_name)

    # Split the raw text into non-empty lines
    docker_lines = [line for line in docker_content.strip().split('\n') if line]

    cm = ComponentManager(lb)
    cm.add_docker_snippet(cm.DEFAULT_CUSTOM_DOCKER_NAME, docker_lines)

    return AddCustomDocker(
        updated_environment=Environment(owner=owner, name=labbook_name))
def resolve_is_local(self, info):
    """Return True if this labbook exists in local storage, False otherwise.

    Args:
        info: graphene resolver info

    Returns:
        bool
    """
    try:
        lb = LabBook()
        lb.from_name(get_logged_in_username(), self.owner, self.name)
    except ValueError:
        # from_name raises ValueError when the labbook is not on disk
        return False
    return True
def mutate_and_get_payload(cls, root, info, owner, labbook_name, content, client_mutation_id=None):
    """Overwrite the labbook README with the supplied content."""
    lb = LabBook(author=get_logged_in_author())
    lb.from_name(get_logged_in_username(), owner, labbook_name)

    # Write data
    lb.write_readme(content)

    return WriteReadme(
        updated_labbook=Labbook(owner=owner, name=labbook_name))
def mutate_and_get_payload(cls, root, info, owner, labbook_name, client_mutation_id=None):
    """Start the labbook's Docker container and report the environment."""
    username = get_logged_in_username()
    lb = LabBook(author=get_logged_in_author())
    lb.from_name(username, owner, labbook_name)

    lb, container_id, ports = ContainerOperations.start_container(
        labbook=lb, username=username)
    logger.info(f'Started new {lb} container ({container_id}) with ports {ports}')

    return StartContainer(
        environment=Environment(owner=owner, name=labbook_name))
def mutate_and_get_payload(cls, root, info, owner, labbook_name, remote_name, client_mutation_id=None):
    """Push the labbook's active branch to a remote.

    Args:
        owner: namespace owner of the labbook
        labbook_name: name of the labbook
        remote_name: git remote to push to; falls back to the default remote when falsy

    Returns:
        PushActiveBranchToRemote with success=True (failures propagate as exceptions)
    """
    username = get_logged_in_username()
    # FIX: previous log message said "Importing remote labbook from {remote_name}",
    # which described the wrong operation — this mutation pushes.
    logger.info(f"Pushing active branch of {owner}/{labbook_name} to {remote_name}")
    lb = LabBook(author=get_logged_in_author())
    lb.from_name(username, owner, labbook_name)

    # Push to the named remote when given; otherwise let push() pick its default
    if remote_name:
        lb.push(remote=remote_name)
    else:
        lb.push()

    return PushActiveBranchToRemote(success=True)
def mutate_and_get_payload(cls, root, info, owner, labbook_name, section, key, description=None, is_dir=False, client_mutation_id=None):
    """Add a favorite for a file or directory within a labbook section.

    Args:
        owner: namespace owner of the labbook
        labbook_name: name of the labbook
        section: section the favorite lives in
        key: relative path of the favorited item within the section
        description: optional human-readable description
        is_dir: True when key refers to a directory

    Returns:
        AddLabbookFavorite carrying the new favorite edge
    """
    username = get_logged_in_username()
    lb = LabBook(author=get_logged_in_author())
    lb.from_name(username, owner, labbook_name)

    # Make sure trailing slashes are always present when favoriting a dir.
    # (FIX: removed the no-op `is_dir = is_dir` self-assignment that was here.)
    if is_dir and not key.endswith("/"):
        key = f"{key}/"

    new_favorite = lb.create_favorite(section, key, description=description, is_dir=is_dir)

    # Create data to populate edge
    create_data = {
        "id": f"{owner}&{labbook_name}&{section}&{key}",
        "owner": owner,
        "section": section,
        "name": labbook_name,
        "key": key,
        "index": new_favorite['index'],
        "_favorite_data": new_favorite,
    }

    # Create cursor (bytes, matching the cursor shape used elsewhere in this file)
    cursor = base64.b64encode(f"{str(new_favorite['index'])}".encode('utf-8'))

    return AddLabbookFavorite(
        new_favorite_edge=LabbookFavoriteConnection.Edge(
            node=LabbookFavorite(**create_data), cursor=cursor))
def mutate_and_get_payload(cls, root, info, owner, labbook_name, manager, packages, client_mutation_id=None):
    """Uninstall the named packages (all under one package manager) from the labbook."""
    labbook = LabBook(author=get_logged_in_author())
    labbook.from_name(get_logged_in_username(), owner, labbook_name)

    # Delegate removal to the environment component manager
    ComponentManager(labbook).remove_packages(package_manager=manager,
                                              package_names=packages)

    return RemovePackageComponents(success=True)
def mutate_and_get_payload(cls, root, info, owner, labbook_name, repository, component_id, client_mutation_id=None):
    """Detach a custom environment component from the labbook."""
    labbook = LabBook(author=get_logged_in_author())
    labbook.from_name(get_logged_in_username(), owner, labbook_name)

    # Delegate removal to the environment component manager
    ComponentManager(labbook).remove_component("custom", repository, component_id)

    return RemoveCustomComponent(success=True)
def mutate_and_get_payload(cls, root, info, owner, labbook_name, packages, client_mutation_id=None):
    """Batch-install packages (one package manager per call) and return the new edges.

    Raises:
        ValueError: if the packages span more than one package manager.
    """
    lb = LabBook(author=get_logged_in_author())
    lb.from_name(get_logged_in_username(), owner, labbook_name)

    # All packages in one call must share a single manager
    managers = list(set([x['manager'] for x in packages]))
    if len(managers) != 1:
        raise ValueError("Only batch add packages via 1 package manager at a time.")
    manager = managers[0]

    # Set the cursor to the end of the collection of packages
    glob_path = os.path.join(lb.root_dir, '.gigantum', 'env', 'package_manager', f"{manager}*")
    cursor = len(glob.glob(glob_path))

    # Create Component Manager
    cm = ComponentManager(lb)
    cm.add_packages(package_manager=manager, packages=packages,
                    from_base=False, force=True)

    new_edges = [
        PackageComponentConnection.Edge(
            node=PackageComponent(manager=manager,
                                  package=pkg["package"],
                                  version=pkg["version"],
                                  schema=CURRENT_SCHEMA),
            cursor=base64.b64encode(str(cursor + idx).encode()).decode())
        for idx, pkg in enumerate(packages)
    ]
    return AddPackageComponents(new_package_component_edges=new_edges)
def mutate_and_get_payload(cls, root, info, owner, labbook_name, client_mutation_id=None):
    """Stop the labbook container, its monitors, and clean up any jupyter proxy route.

    Raises:
        ValueError: if the container could not be stopped.
    """
    username = get_logged_in_username()
    lb = LabBook(author=get_logged_in_author())
    lb.from_name(username, owner, labbook_name)

    # Capture the container IP before stopping so the proxy route can be found below
    lb_ip, _ = ContainerOperations.get_labbook_ip(lb, username)

    stop_labbook_monitor(lb, username)
    lb, stopped = ContainerOperations.stop_container(labbook=lb, username=username)

    try:
        # We know `git gc` fails on windows, so just give best effort fire-and-forget
        wf = GitWorkflow(lb)
        wf.garbagecollect()
    except Exception as e:
        logger.error(e)

    # Try to remove route from proxy
    lb_port = 8888
    lb_endpoint = f'http://{lb_ip}:{lb_port}'
    pr = confhttpproxy.ProxyRouter.get_proxy(lb.labmanager_config.config['proxy'])
    routes = pr.routes
    matching = [k for k in routes.keys()
                if lb_endpoint in routes[k]['target'] and 'jupyter' in k]
    if len(matching) == 1:
        # Route keys carry a leading '/', which remove() does not expect
        pr.remove(matching[0][1:])

    if not stopped:
        raise ValueError(f"Failed to stop labbook {labbook_name}")

    return StopContainer(
        environment=Environment(owner=owner, name=labbook_name))
def mutate_and_get_payload(cls, root, info, owner, labbook_name, username, client_mutation_id=None):
    """Add `username` as a collaborator on this labbook's remote repository.

    Args:
        owner: namespace owner of the labbook
        labbook_name: name of the labbook
        username: username of the collaborator to add

    Returns:
        AddLabbookCollaborator carrying the updated labbook

    Raises:
        ValueError: if no Authorization header is present on the request
    """
    logged_in_username = get_logged_in_username()
    lb = LabBook(author=get_logged_in_author())
    lb.from_name(logged_in_username, owner, labbook_name)

    # TODO: Future work will look up remote in LabBook data, allowing user to select remote.
    default_remote = lb.labmanager_config.config['git']['default_remote']
    admin_service = None
    for remote in lb.labmanager_config.config['git']['remotes']:
        if default_remote == remote:
            admin_service = lb.labmanager_config.config['git']['remotes'][
                remote]['admin_service']
            break

    # Extract valid Bearer token
    if "HTTP_AUTHORIZATION" in info.context.headers.environ:
        token = parse_token(info.context.headers.environ["HTTP_AUTHORIZATION"])
    else:
        raise ValueError(
            "Authorization header not provided. Must have a valid session to query for collaborators"
        )

    # Add collaborator to remote service
    mgr = GitLabManager(default_remote, admin_service, token)
    mgr.add_collaborator(owner, labbook_name, username)

    # Prime dataloader with this labbook.
    # FIX: the loader key format is "<logged-in user>&<owner>&<name>" (see
    # get_labbook_instance); the previous code used the collaborator's username
    # twice, priming a key no resolver would ever look up.
    dataloader = LabBookLoader()
    dataloader.prime(f"{logged_in_username}&{owner}&{lb.name}", lb)

    create_data = {"owner": owner, "name": labbook_name}
    return AddLabbookCollaborator(updated_labbook=Labbook(**create_data))
def mutate_and_get_payload(cls, root, info, owner, labbook_name, section, key, client_mutation_id=None):
    """Remove a favorite from the labbook and return the node id that was removed."""
    lb = LabBook(author=get_logged_in_author())
    lb.from_name(get_logged_in_username(), owner, labbook_name)

    # Manually generate the Node ID for now. This simplifies the connection between the file browser and favorites
    # widgets in the UI
    raw_node_id = f"LabbookFavorite:{owner}&{labbook_name}&{section}&{key}"
    favorite_node_id = base64.b64encode(raw_node_id.encode()).decode()

    # Remove Favorite
    lb.remove_favorite(section, key)

    return RemoveLabbookFavorite(success=True, removed_node_id=favorite_node_id)
def mutate_and_get_payload(cls, root, info, owner, labbook_name, title, body=None, tags=None, client_mutation_id=None):
    """Create a free-form user note as an activity record on the labbook."""
    lb = LabBook(author=get_logged_in_author())
    lb.from_name(get_logged_in_username(), owner, labbook_name)

    store = ActivityStore(lb)

    # Detail record holds the (optional) markdown body
    detail = ActivityDetailRecord(ActivityDetailType.NOTE, show=True, importance=255)
    if body:
        detail.add_value('text/markdown', body)

    # Activity record carries the title; user notes have no linked commit
    record = ActivityRecord(ActivityType.NOTE,
                            message=title,
                            linked_commit="no-linked-commit",
                            importance=255,
                            tags=tags)
    record.add_detail_object(detail)
    record = store.create_activity_record(record)

    node = ActivityRecordObject(owner=owner, name=labbook_name, commit=record.commit)
    return CreateUserNote(
        new_activity_record_edge=ActivityConnection.Edge(node=node, cursor=record.commit))
def mutate_and_get_payload(cls, root, info, owner, labbook_name, branch_name, client_mutation_id=None):
    """Check out an existing branch in the labbook.

    Args:
        owner: namespace owner of the labbook
        labbook_name: name of the labbook
        branch_name: branch to switch to

    Returns:
        CheckoutBranch carrying the labbook
    """
    labbook_obj = LabBook(author=get_logged_in_author())
    labbook_obj.from_name(get_logged_in_username(), owner, labbook_name)

    # Checkout
    labbook_obj.checkout_branch(branch_name)

    return CheckoutBranch(labbook=Labbook(owner=owner, name=labbook_name))
def mutate_and_get_payload(cls, root, info, owner, labbook_name, branch_name, client_mutation_id=None):
    """Create a new branch in the labbook and switch to it.

    Args:
        owner: namespace owner of the labbook
        labbook_name: name of the labbook
        branch_name: name of the branch to create

    Raises:
        ValueError: if the branch was created but could not be checked out
    """
    labbook_obj = LabBook(author=get_logged_in_author())
    labbook_obj.from_name(get_logged_in_username(), owner, labbook_name)

    # Create and switch to the new branch
    labbook_obj.checkout_branch(branch_name, new=True)
    if labbook_obj.active_branch != branch_name:
        raise ValueError(
            f"Create branch failed, could not switch to new branch {branch_name}"
        )

    # Build a LabbookRef pointing at the new branch
    ref_data = {
        "owner": owner,
        "name": labbook_name,
        "prefix": None,
        "branch": branch_name,
    }
    return CreateBranch(branch=LabbookRef(**ref_data))
def mutate_and_get_payload(cls, root, info, owner, labbook_name, section, key, updated_index=None, updated_description=None, client_mutation_id=None):
    """Update a favorite's description and/or position and return the refreshed edge."""
    lb = LabBook(author=get_logged_in_author())
    lb.from_name(get_logged_in_username(), owner, labbook_name)

    # Update Favorite
    new_favorite = lb.update_favorite(section, key,
                                      new_description=updated_description,
                                      new_index=updated_index)

    # Create data to populate edge
    edge_data = {
        "id": f"{owner}&{labbook_name}&{section}&{key}",
        "owner": owner,
        "section": section,
        "key": key,
        "_favorite_data": new_favorite,
    }

    # Create dummy cursor
    cursor = base64.b64encode(f"{str(new_favorite['index'])}".encode('utf-8'))

    return UpdateLabbookFavorite(
        updated_favorite_edge=LabbookFavoriteConnection.Edge(
            node=LabbookFavorite(**edge_data), cursor=cursor))