def helper_resolve_favorites(self, labbook, kwargs):
    """Helper to build a LabbookFavoriteConnection for this section's favorites."""
    # Favorites come back as a dict; only the values (favorite records) are needed
    edges = list(labbook.get_favorites(self.section).values())

    # Naive cursors: the base64-encoded list index of each edge
    cursors = [base64.b64encode(str(idx).encode("UTF-8")).decode("UTF-8")
               for idx, _ in enumerate(edges)]

    # Apply relay slicing/cursor arguments
    lbc = ListBasedConnection(edges, cursors, kwargs)
    lbc.apply()

    edge_objs = []
    for edge, cursor in zip(lbc.edges, lbc.cursors):
        node = LabbookFavorite(
            id=f"{self.owner}&{self.name}&{self.section}&{edge['key']}",
            owner=self.owner,
            section=self.section,
            name=self.name,
            index=int(edge['index']),
            key=edge['key'],
            _favorite_data=edge)
        edge_objs.append(
            LabbookFavoriteConnection.Edge(node=node, cursor=cursor))

    return LabbookFavoriteConnection(edges=edge_objs, page_info=lbc.page_info)
def helper_resolve_secrets_file_mapping(labbook, kwargs):
    """Helper to build a SecretFileMappingConnection for a labbook's secrets.

    Args:
        labbook: LabBook instance whose secret store should be listed
        kwargs: relay pagination args (first/last/before/after)

    Returns:
        SecretFileMappingConnection
    """
    secrets_store = SecretStore(labbook, get_logged_in_username())
    # Materialize to a list: a dict keys view is not sliceable, and
    # ListBasedConnection slices the edges when applying pagination args.
    edges = list(secrets_store.secret_map.keys())
    if edges:
        cursors = [
            base64.b64encode(
                "{}".format(cnt).encode("UTF-8")).decode("UTF-8")
            for cnt, x in enumerate(edges)
        ]

        # Process slicing and cursor args
        lbc = ListBasedConnection(edges, cursors, kwargs)
        lbc.apply()

        edge_objs = []
        for edge, cursor in zip(lbc.edges, lbc.cursors):
            node_obj = SecretFileMapping(owner=labbook.owner,
                                         name=labbook.name,
                                         filename=edge,
                                         mount_path=secrets_store[edge])
            edge_objs.append(
                SecretFileMappingConnection.Edge(node=node_obj, cursor=cursor))
        return SecretFileMappingConnection(edges=edge_objs,
                                           page_info=lbc.page_info)
    else:
        # No secrets defined: return an empty connection
        pi = graphene.relay.PageInfo(has_next_page=False,
                                     has_previous_page=False)
        return SecretFileMappingConnection(edges=[], page_info=pi)
def resolve_available_bases(self, info, **kwargs):
    """Method to return all graphene BaseImages that are available.

    Returns:
        BaseComponentConnection
    """
    repo = ComponentRepository()
    available = repo.get_component_list("base")

    # Naive cursors: base64-encoded index of each component
    cursors = [base64.b64encode(str(position).encode("UTF-8")).decode("UTF-8")
               for position, _ in enumerate(available)]

    # Apply relay slicing/cursor arguments
    lbc = ListBasedConnection(available, cursors, kwargs)
    lbc.apply()

    # Build BaseComponent nodes for the page of results
    edge_objs = []
    for component, cursor in zip(lbc.edges, lbc.cursors):
        node = BaseComponent(repository=component['###repository###'],
                             component_id=component['id'],
                             revision=int(component['revision']))
        edge_objs.append(BaseComponentConnection.Edge(node=node, cursor=cursor))

    return BaseComponentConnection(edges=edge_objs, page_info=lbc.page_info)
def helper_resolve_all_files(self, labbook, kwargs):
    """Helper method to populate the LabbookFileConnection"""
    # Walk the entire section, excluding hidden entries (.git, .gigantum, etc.)
    entries = labbook.walkdir(section=self.section, show_hidden=False)

    # Naive cursors: base64-encoded list index
    cursors = [base64.b64encode(str(i).encode("UTF-8")).decode("UTF-8")
               for i, _ in enumerate(entries)]

    # Apply relay slicing/cursor arguments
    lbc = ListBasedConnection(entries, cursors, kwargs)
    lbc.apply()

    edge_objs = []
    for entry, cursor in zip(lbc.edges, lbc.cursors):
        node = LabbookFile(owner=self.owner,
                           section=self.section,
                           name=self.name,
                           key=entry['key'],
                           _file_info=entry)
        edge_objs.append(LabbookFileConnection.Edge(node=node, cursor=cursor))

    return LabbookFileConnection(edges=edge_objs, page_info=lbc.page_info)
def resolve_available_custom_dependencies(self, info, **kwargs):
    """Method to return all graphene CustomDependencies that are available
    (at the latest version).

    Returns:
        CustomDependencyConnection
    """
    repo = ComponentRepository()
    available = repo.get_component_list("custom")

    # Naive cursors: base64-encoded index of each component
    cursors = [base64.b64encode(str(position).encode("UTF-8")).decode("UTF-8")
               for position, _ in enumerate(available)]

    # Apply relay slicing/cursor arguments
    lbc = ListBasedConnection(available, cursors, kwargs)
    lbc.apply()

    # Build CustomComponent nodes for the page of results
    edge_objs = []
    for component, cursor in zip(lbc.edges, lbc.cursors):
        node = CustomComponent(repository=component['###repository###'],
                               component_id=component['id'],
                               revision=component['revision'])
        edge_objs.append(
            CustomComponentConnection.Edge(node=node, cursor=cursor))

    return CustomComponentConnection(edges=edge_objs, page_info=lbc.page_info)
def helper_resolve_custom_dependencies(labbook, kwargs):
    """Helper to generate custom dependencies (DEPRECATED)"""
    cm = ComponentManager(labbook)
    components = cm.get_component_list("custom")

    # Guard clause: nothing installed -> empty connection with static page info
    if not components:
        return CustomComponentConnection(
            edges=[],
            page_info=graphene.relay.PageInfo(has_next_page=False,
                                              has_previous_page=False))

    # Naive cursors: base64-encoded list index
    cursors = [base64.b64encode(str(i).encode("UTF-8")).decode("UTF-8")
               for i, _ in enumerate(components)]

    # Apply relay slicing/cursor arguments
    lbc = ListBasedConnection(components, cursors, kwargs)
    lbc.apply()

    edge_objs = []
    for component, cursor in zip(lbc.edges, lbc.cursors):
        node = CustomComponent(repository=component['###repository###'],
                               component_id=component['id'],
                               revision=component['revision'])
        edge_objs.append(
            CustomComponentConnection.Edge(node=node, cursor=cursor))
    return CustomComponentConnection(edges=edge_objs, page_info=lbc.page_info)
def helper_resolve_branches(self, lb, kwargs):
    """Helper to build a LabbookRefConnection from the repo's git refs."""
    # Cursors are simply the base64-encoded index into the ref list
    refs = list(lb.git.repo.refs)
    cursors = [base64.b64encode(str(i).encode("UTF-8")).decode("UTF-8")
               for i, _ in enumerate(refs)]

    # Apply relay slicing/cursor arguments
    lbc = ListBasedConnection(refs, cursors, kwargs)
    lbc.apply()

    edge_objs = []
    for ref, cursor in zip(lbc.edges, lbc.cursors):
        # Remote refs look like "origin/branch"; local refs are bare names.
        # NOTE(review): only the second path segment is kept, so a branch
        # name that itself contains "/" would be truncated here — presumably
        # branch names never contain slashes in this app; confirm.
        parts = ref.name.split("/")
        if len(parts) > 1:
            prefix, branch = parts[0], parts[1]
        else:
            prefix, branch = None, parts[0]

        node = LabbookRef(name=lb.name,
                          owner=self.owner,
                          prefix=prefix,
                          ref_name=branch)
        edge_objs.append(LabbookRefConnection.Edge(node=node, cursor=cursor))

    return LabbookRefConnection(edges=edge_objs, page_info=lbc.page_info)
def helper_resolve_package_dependencies(labbook, kwargs):
    """Helper to resolve the packages"""
    cm = ComponentManager(labbook)
    packages = cm.get_component_list("package_manager")

    # Guard clause: no packages -> empty connection with static page info
    if not packages:
        return PackageComponentConnection(
            edges=[],
            page_info=graphene.relay.PageInfo(has_next_page=False,
                                              has_previous_page=False))

    # Naive cursors: base64-encoded list index
    cursors = [base64.b64encode(str(i).encode("UTF-8")).decode("UTF-8")
               for i, _ in enumerate(packages)]

    # Apply relay slicing/cursor arguments
    lbc = ListBasedConnection(packages, cursors, kwargs)
    lbc.apply()

    # One dataloader shared by every node so latest-version lookups batch
    keys = [f"{p['manager']}&{p['package']}" for p in lbc.edges]
    vd = PackageLatestVersionLoader(keys, labbook, get_logged_in_username())

    edge_objs = []
    for pkg, cursor in zip(lbc.edges, lbc.cursors):
        node = PackageComponent(_version_dataloader=vd,
                                manager=pkg['manager'],
                                package=pkg['package'],
                                version=pkg['version'],
                                from_base=pkg['from_base'],
                                is_valid=True,
                                schema=pkg['schema'])
        edge_objs.append(
            PackageComponentConnection.Edge(node=node, cursor=cursor))
    return PackageComponentConnection(edges=edge_objs, page_info=lbc.page_info)
def resolve_local_datasets(self, info, order_by: str, sort: str, **kwargs):
    """Method to return all graphene Dataset instances for the logged in user
    available locally.

    Uses the "currently logged in" user.

    Args:
        order_by(str): String specifying how datasets should be sorted
        sort(str): 'desc' for descending, 'asc' for ascending (default)

    Returns:
        DatasetConnection
    """
    username = get_logged_in_username()

    # Map the sort direction string to a reverse flag, rejecting anything else
    directions = {"desc": True, "asc": False}
    if sort not in directions:
        raise ValueError(
            f"Unsupported sort_str: {sort}. Use `desc`, `asc`")
    reverse = directions[sort]

    # Collect all datasets for all owners
    local_datasets = InventoryManager().list_datasets(username=username,
                                                      sort_mode=order_by)
    if reverse:
        local_datasets.reverse()

    edges = [(ds.namespace, ds.name) for ds in local_datasets]
    cursors = [base64.b64encode(str(i).encode("UTF-8")).decode("UTF-8")
               for i, _ in enumerate(edges)]

    # Apply relay slicing/cursor arguments
    lbc = ListBasedConnection(edges, cursors, kwargs)
    lbc.apply()

    edge_objs = []
    for (namespace, ds_name), cursor in zip(lbc.edges, lbc.cursors):
        node = Dataset(id="{}&{}".format(namespace, ds_name),
                       name=ds_name,
                       owner=namespace)
        edge_objs.append(DatasetConnection.Edge(node=node, cursor=cursor))

    return DatasetConnection(edges=edge_objs, page_info=lbc.page_info)
def resolve_local_labbooks(self, info, sort: str, reverse: bool, **kwargs):
    """Method to return all graphene Labbook instances for the logged in user
    available locally.

    Uses the "currently logged in" user.

    Args:
        sort(str): String specifying how labbooks should be sorted
        reverse(bool): Reverse sorting if True

    Returns:
        LabbookConnection
    """
    lb = LabBook()
    username = get_logged_in_username()

    # Collect all labbooks for all owners
    listing = lb.list_local_labbooks(username=username,
                                     sort_mode=sort,
                                     reverse=reverse)

    # Naive cursors: base64-encoded list index
    cursors = [base64.b64encode(str(i).encode("UTF-8")).decode("UTF-8")
               for i, _ in enumerate(listing)]

    # Apply relay slicing/cursor arguments
    lbc = ListBasedConnection(listing, cursors, kwargs)
    lbc.apply()

    edge_objs = []
    for record, cursor in zip(lbc.edges, lbc.cursors):
        node = Labbook(id="{}&{}".format(record["owner"], record["name"]),
                       name=record["name"],
                       owner=record["owner"])
        edge_objs.append(LabbookConnection.Edge(node=node, cursor=cursor))

    return LabbookConnection(edges=edge_objs, page_info=lbc.page_info)
def helper_resolve_all_files(self, labbook, kwargs):
    """Helper method to populate the LabbookFileConnection"""
    # File listings are cached briefly in redis (db 5) so repeated queries
    # don't re-walk the section directory.
    redis_conn = redis.Redis(db=5)
    cache_key = f"FILE_LIST_CACHE|{labbook.key}|{self.section}"

    if redis_conn.exists(cache_key):
        # Cache hit: deserialize the listing and refresh the TTL
        entries = json.loads(redis_conn.get(cache_key).decode("utf-8"))
        redis_conn.expire(cache_key, 5)
    else:
        # Cache miss: walk the section (hidden entries such as .git and
        # .gigantum are excluded) and cache the result for 5 seconds.
        entries = FileOperations.walkdir(labbook, section=self.section,
                                         show_hidden=False)
        redis_conn.set(cache_key, json.dumps(entries))
        redis_conn.expire(cache_key, 5)

    # Naive cursors: base64-encoded list index
    cursors = [base64.b64encode(str(i).encode("UTF-8")).decode("UTF-8")
               for i, _ in enumerate(entries)]

    # Apply relay slicing/cursor arguments
    lbc = ListBasedConnection(entries, cursors, kwargs)
    lbc.apply()

    edge_objs = []
    for entry, cursor in zip(lbc.edges, lbc.cursors):
        node = LabbookFile(owner=self.owner,
                           section=self.section,
                           name=self.name,
                           key=entry['key'],
                           _file_info=entry)
        edge_objs.append(LabbookFileConnection.Edge(node=node, cursor=cursor))

    return LabbookFileConnection(edges=edge_objs, page_info=lbc.page_info)
def resolve_background_jobs(self, info, **kwargs):
    """Method to return all background jobs the system is aware of:
    Queued, Started, Finished, Failed.

    Returns:
        JobStatusConnection
    """
    job_dispatcher = Dispatcher()

    edges: List[str] = [j.job_key.key_str for j in job_dispatcher.all_jobs]
    # Naive cursors: base64-encoded index. Decode to str so the cursors are
    # text like every other resolver in this file produces — b64encode
    # returns bytes, which would otherwise leak into the GraphQL response
    # and break cursor round-tripping.
    cursors = [base64.b64encode(f"{str(cnt)}".encode('utf-8')).decode('utf-8')
               for cnt, x in enumerate(edges)]

    # Process slicing and cursor args
    lbc = ListBasedConnection(edges, cursors, kwargs)
    lbc.apply()

    edge_objs = []
    for edge, cursor in zip(lbc.edges, lbc.cursors):
        edge_objs.append(JobStatusConnection.Edge(node=JobStatus(edge),
                                                  cursor=cursor))

    return JobStatusConnection(edges=edge_objs, page_info=lbc.page_info)
def helper_resolve_files(self, labbook, kwargs):
    """Helper method to populate the LabbookFileConnection"""
    # Normalize the optional root_dir into a base path with a single trailing
    # separator (collapse any doubled separator created by the append).
    base_dir = None
    root = kwargs.get('root_dir')
    if root:
        base_dir = (root + os.path.sep).replace(os.path.sep + os.path.sep,
                                                os.path.sep)

    # List one directory level, excluding hidden entries (.git, .gigantum)
    entries = FileOperations.listdir(labbook, self.section,
                                     base_path=base_dir,
                                     show_hidden=False)

    # Naive cursors: base64-encoded list index
    cursors = [base64.b64encode(str(i).encode("UTF-8")).decode("UTF-8")
               for i, _ in enumerate(entries)]

    # Apply relay slicing/cursor arguments
    lbc = ListBasedConnection(entries, cursors, kwargs)
    lbc.apply()

    edge_objs = []
    for entry, cursor in zip(lbc.edges, lbc.cursors):
        node = LabbookFile(owner=self.owner,
                           section=self.section,
                           name=self.name,
                           key=entry['key'],
                           _file_info=entry)
        edge_objs.append(LabbookFileConnection.Edge(node=node, cursor=cursor))

    return LabbookFileConnection(edges=edge_objs, page_info=lbc.page_info)
def helper_resolve_all_files(self, dataset, kwargs):
    """Helper method to populate the DatasetFileConnection"""
    manifest = Manifest(dataset, get_logged_in_username())

    # Generate naive cursors (base64-encoded list index)
    # TODO: Use manifest pagination interface
    entries = manifest.list()
    cursors = [base64.b64encode(str(i).encode("UTF-8")).decode("UTF-8")
               for i, _ in enumerate(entries)]

    # Apply relay slicing/cursor arguments
    lbc = ListBasedConnection(entries, cursors, kwargs)
    lbc.apply()

    edge_objs = []
    for entry, cursor in zip(lbc.edges, lbc.cursors):
        node = DatasetFile(owner=self.owner,
                           name=self.name,
                           key=entry['key'],
                           _file_info=entry)
        edge_objs.append(DatasetFileConnection.Edge(node=node, cursor=cursor))
    return DatasetFileConnection(edges=edge_objs, page_info=lbc.page_info)
def resolve_remote_labbooks(self, info, sort: str, reverse: bool, **kwargs):
    """Method to return all RemoteLabbook instances for the logged in user.

    This is a remote call, so it should be fetched on its own and only when
    needed. The user must have a valid session for data to be returned.

    It is recommended to use a large page size (e.g. 50-100) because the
    remote server returns all the available data at once, so it is more
    efficient to load a lot of records at a time.

    Args:
        sort(str): String specifying how labbooks should be sorted
        reverse(bool): Reverse sorting if True

    Supported sorting modes:
        - az: naturally sort
        - created_on: sort by creation date, newest first
        - modified_on: sort by modification date, newest first

    Returns:
        RemoteLabbookConnection
    """
    # Load config data
    configuration = Configuration().config

    # Extract a valid Bearer token from the request headers, if present
    token = None
    if hasattr(info.context.headers, 'environ'):
        if "HTTP_AUTHORIZATION" in info.context.headers.environ:
            token = parse_token(
                info.context.headers.environ["HTTP_AUTHORIZATION"])
    if not token:
        raise ValueError(
            "Authorization header not provided. Cannot list remote LabBooks.")

    # Look up the admin service configured for the default git remote
    default_remote = configuration['git']['default_remote']
    admin_service = None
    for remote in configuration['git']['remotes']:
        if default_remote == remote:
            admin_service = configuration['git']['remotes'][remote][
                'admin_service']
            break
    if not admin_service:
        raise ValueError('admin_service could not be found')

    # Query backend for data
    mgr = GitLabManager(default_remote, admin_service, access_token=token)
    listing = mgr.list_labbooks(sort_mode=sort, reverse=reverse)

    # Naive cursors: base64-encoded list index
    cursors = [base64.b64encode(str(i).encode("UTF-8")).decode("UTF-8")
               for i, _ in enumerate(listing)]

    # Apply relay slicing/cursor arguments
    lbc = ListBasedConnection(listing, cursors, kwargs)
    lbc.apply()

    edge_objs = []
    for record, cursor in zip(lbc.edges, lbc.cursors):
        node = RemoteLabbook(
            id="{}&{}".format(record["namespace"], record["labbook_name"]),
            name=record["labbook_name"],
            owner=record["namespace"],
            description=record["description"],
            creation_date_utc=record["created_on"],
            modified_date_utc=record["modified_on"])
        edge_objs.append(
            RemoteLabbookConnection.Edge(node=node, cursor=cursor))

    return RemoteLabbookConnection(edges=edge_objs, page_info=lbc.page_info)