def mutate_and_get_payload(cls, root, info, owner, labbook_name, other_branch_name,
                           override_method="abort", client_mutation_id=None):
    username = get_logged_in_username()
    lb = InventoryManager().load_labbook(username, owner, labbook_name,
                                         author=get_logged_in_author())
    with lb.lock():
        override = MergeOverride(override_method)
        bm = BranchManager(lb, username=username)
        if override == MergeOverride.ABORT:
            bm.merge_from(other_branch=other_branch_name)
        elif override == MergeOverride.OURS:
            bm.merge_use_ours(other_branch=other_branch_name)
        elif override == MergeOverride.THEIRS:
            bm.merge_use_theirs(other_branch=other_branch_name)
        else:
            raise ValueError(f"Unknown override method {override}")

    return MergeFromBranch(Labbook(id="{}&{}".format(owner, labbook_name),
                                   name=labbook_name, owner=owner))
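# A minimal sketch (an assumption, not the source definition) of the MergeOverride
# enum consumed above. Constructing it from the raw `override_method` string works
# because Enum lookup-by-value maps "abort" -> ABORT, "ours" -> OURS, etc.
from enum import Enum

class MergeOverrideSketch(Enum):
    ABORT = "abort"
    OURS = "ours"
    THEIRS = "theirs"

assert MergeOverrideSketch("ours") is MergeOverrideSketch.OURS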
def mutate_and_get_payload(cls, root, info, owner, labbook_name, confirm,
                           client_mutation_id=None):
    username = get_logged_in_username()
    lb = InventoryManager().load_labbook(username, owner, labbook_name,
                                         author=get_logged_in_author())
    if confirm:
        logger.info(f"Deleting {str(lb)}...")
        try:
            lb, stopped = ContainerOperations.stop_container(labbook=lb, username=username)
        except OSError as e:
            logger.warning(e)

        lb, docker_removed = ContainerOperations.delete_image(labbook=lb, username=username)
        if not docker_removed:
            raise ValueError(f'Cannot delete docker image for {str(lb)} - unable to delete Project from disk')

        datasets_to_schedule = InventoryManager().delete_labbook(username, owner, labbook_name)

        # Schedule jobs to clean the file cache for any linked datasets (if no other references exist)
        for cleanup_job in datasets_to_schedule:
            # Schedule Job to clear file cache if dataset is no longer in use
            job_metadata = {'method': 'clean_dataset_file_cache'}
            job_kwargs = {
                'logged_in_username': username,
                'dataset_owner': cleanup_job.namespace,
                'dataset_name': cleanup_job.name,
                'cache_location': cleanup_job.cache_root
            }
            dispatcher = Dispatcher()
            job_key = dispatcher.dispatch_task(jobs.clean_dataset_file_cache,
                                               metadata=job_metadata,
                                               kwargs=job_kwargs)
            logger.info(f"Dispatched clean_dataset_file_cache({cleanup_job.namespace}/{cleanup_job.name})"
                        f" to Job {job_key}")

        # Verify delete worked
        if os.path.exists(lb.root_dir):
            logger.error(f'Deleted {str(lb)} but root directory {lb.root_dir} still exists!')
            return DeleteLabbook(success=False)
        else:
            return DeleteLabbook(success=True)
    else:
        logger.info(f"Dry run deleting {str(lb)} -- not deleted.")
        return DeleteLabbook(success=False)
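# Illustrative only: the cleanup objects returned by delete_labbook are consumed
# through exactly three attributes above. A stand-in with the same shape (a
# sketch, not the source type; the values are made-up examples):
from collections import namedtuple

DatasetCleanupJobSketch = namedtuple('DatasetCleanupJobSketch',
                                     ['namespace', 'name', 'cache_root'])
example_job = DatasetCleanupJobSketch(namespace='alice', name='my-dataset',
                                      cache_root='/tmp/dataset-cache/alice/my-dataset')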
def helper_resolve_secrets_file_mapping(labbook, kwargs):
    secrets_store = SecretStore(labbook, get_logged_in_username())
    edges = secrets_store.secret_map.keys()

    if edges:
        cursors = [base64.b64encode("{}".format(cnt).encode("UTF-8")).decode("UTF-8")
                   for cnt, x in enumerate(edges)]

        # Process slicing and cursor args
        lbc = ListBasedConnection(edges, cursors, kwargs)
        lbc.apply()

        # Build the edge objects
        edge_objs = []
        for edge, cursor in zip(lbc.edges, lbc.cursors):
            node_obj = SecretFileMapping(owner=labbook.owner, name=labbook.name,
                                         filename=edge, mount_path=secrets_store[edge])
            edge_objs.append(SecretFileMappingConnection.Edge(node=node_obj, cursor=cursor))
        return SecretFileMappingConnection(edges=edge_objs, page_info=lbc.page_info)
    else:
        pi = graphene.relay.PageInfo(has_next_page=False, has_previous_page=False)
        return SecretFileMappingConnection(edges=[], page_info=pi)
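# A minimal sketch (an assumption, not the source class) of the relay-style
# slicing that ListBasedConnection appears to apply: `after` is a base64-encoded
# index cursor and `first` caps the page size.
import base64

def slice_edges_sketch(edges, cursors, args):
    start = 0
    if args.get('after'):
        start = int(base64.b64decode(args['after']).decode('UTF-8')) + 1
    end = len(edges)
    if args.get('first') is not None:
        end = min(start + args['first'], end)
    return edges[start:end], cursors[start:end]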
def resolve_repository_name_is_available(self, info, name: str):
    """Resolver to check if a repository name is in use locally or remotely

    Args:
        info: GraphQL resolve info
        name: desired name for the repository

    Returns:
        bool: True if the name is available, False if it is already in use
    """
    # Check if repository exists locally
    logged_in_username = get_logged_in_username()
    im = InventoryManager()
    if im.repository_exists(logged_in_username, logged_in_username, name):
        return False

    # Check if repository exists remotely
    remote_config = Configuration().get_remote_configuration()
    auth_service = None
    remote = None
    if remote_config:
        auth_service = remote_config.get('admin_service')
        remote = remote_config.get('git_remote')

    mgr = GitLabManager(remote, auth_service, flask.g.access_token)
    if mgr.repository_exists(logged_in_username, name):
        return False

    # If you get here, the name is available
    return True
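# Illustrative only: the remote configuration consumed above is assumed to be a
# dict exposing at least these two keys (the values here are made-up examples):
remote_config_example = {
    'git_remote': 'repo.example.com',
    'admin_service': 'usersrv.example.com',
}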
def mutate_and_get_payload(cls, root, info, owner, labbook_name, remote_url,
                           client_mutation_id=None):
    username = get_logged_in_username()
    logger.info(f"Importing remote labbook from {remote_url}")
    lb = LabBook(author=get_logged_in_author())

    # TODO: Future work will look up remote in LabBook data, allowing user to select remote.
    default_remote = lb.labmanager_config.config['git']['default_remote']
    admin_service = None
    for remote in lb.labmanager_config.config['git']['remotes']:
        if default_remote == remote:
            admin_service = lb.labmanager_config.config['git']['remotes'][remote]['admin_service']
            break

    # Extract valid Bearer token
    if hasattr(info.context, 'headers') and "HTTP_AUTHORIZATION" in info.context.headers.environ:
        token = parse_token(info.context.headers.environ["HTTP_AUTHORIZATION"])
    else:
        raise ValueError("Authorization header not provided. "
                         "Must have a valid session to query for collaborators")

    mgr = GitLabManager(default_remote, admin_service, token)
    mgr.configure_git_credentials(default_remote, username)
    lb.from_remote(remote_url, username, owner, labbook_name)
    return ImportRemoteLabbook(active_branch=lb.active_branch)
def mutate_and_get_payload(cls, root, info, dataset_owner, dataset_name,
                           labbook_owner=None, labbook_name=None, client_mutation_id=None):
    logged_in_user = get_logged_in_username()

    # Schedule a background job to verify the dataset contents
    job_metadata = {'method': 'verify_dataset_contents'}
    job_kwargs = {
        'logged_in_username': logged_in_user,
        'access_token': flask.g.access_token,
        'id_token': flask.g.id_token,
        'dataset_owner': dataset_owner,
        'dataset_name': dataset_name,
        'labbook_owner': labbook_owner,
        'labbook_name': labbook_name
    }
    dispatcher = Dispatcher()
    job_key = dispatcher.dispatch_task(jobs.verify_dataset_contents,
                                       metadata=job_metadata,
                                       kwargs=job_kwargs)
    logger.info(f"Dispatched verify_dataset_contents({dataset_owner}/{dataset_name}) to Job {job_key}")

    return VerifyDataset(background_job_key=job_key)
def mutate_and_get_payload(cls, root, info, owner, labbook_name, username, permissions,
                           client_mutation_id=None):
    # TODO(billvb/dmk) - Here "username" refers to the intended recipient username.
    # It should probably be renamed here and in the frontend to "collaboratorUsername"
    logged_in_username = get_logged_in_username()
    lb = InventoryManager().load_labbook(logged_in_username, owner, labbook_name,
                                         author=get_logged_in_author())

    # TODO: Future work will look up remote in LabBook data, allowing user to select remote.
    default_remote = lb.client_config.config['git']['default_remote']
    admin_service = None
    for remote in lb.client_config.config['git']['remotes']:
        if default_remote == remote:
            admin_service = lb.client_config.config['git']['remotes'][remote]['admin_service']
            break

    # Extract valid Bearer token
    if "HTTP_AUTHORIZATION" in info.context.headers.environ:
        token = parse_token(info.context.headers.environ["HTTP_AUTHORIZATION"])
    else:
        raise ValueError("Authorization header not provided. "
                         "Must have a valid session to query for collaborators")

    if permissions == 'readonly':
        perm = ProjectPermissions.READ_ONLY
    elif permissions == 'readwrite':
        perm = ProjectPermissions.READ_WRITE
    elif permissions == 'owner':
        perm = ProjectPermissions.OWNER
    else:
        raise ValueError(f"Unknown permission set: {permissions}")

    mgr = GitLabManager(default_remote, admin_service, token)

    existing_collabs = mgr.get_collaborators(owner, labbook_name)
    if username not in [n[1] for n in existing_collabs]:
        logger.info(f"Adding user {username} to {owner}/{labbook_name} "
                    f"with permission {perm}")
        mgr.add_collaborator(owner, labbook_name, username, perm)
    else:
        logger.warning(f"Changing permission of {username} on "
                       f"{owner}/{labbook_name} to {perm}")
        mgr.delete_collaborator(owner, labbook_name, username)
        mgr.add_collaborator(owner, labbook_name, username, perm)

    create_data = {"owner": owner, "name": labbook_name}
    return AddLabbookCollaborator(updated_labbook=Labbook(**create_data))
def mutate_and_get_payload(cls, root, info, owner, labbook_name, section,
                           src_path, dst_path, client_mutation_id=None, **kwargs):
    username = get_logged_in_username()
    lb = InventoryManager().load_labbook(username, owner, labbook_name,
                                         author=get_logged_in_author())
    with lb.lock():
        mv_results = FileOperations.move_file(lb, section, src_path, dst_path)

    file_edges = list()
    for file_dict in mv_results:
        file_edges.append(LabbookFile(owner=owner,
                                      name=labbook_name,
                                      section=section,
                                      key=file_dict['key'],
                                      is_dir=file_dict['is_dir'],
                                      is_favorite=file_dict['is_favorite'],
                                      modified_at=file_dict['modified_at'],
                                      size=str(file_dict['size'])))
    cursors = [base64.b64encode("{}".format(cnt).encode("UTF-8")).decode("UTF-8")
               for cnt, x in enumerate(file_edges)]
    edge_objs = [LabbookFileConnection.Edge(node=e, cursor=c)
                 for e, c in zip(file_edges, cursors)]
    return MoveLabbookFile(updated_edges=edge_objs)
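# Illustrative only: the cursors built above are just base64-encoded list
# indices, so they can be decoded back to an offset when paging.
import base64

cursor = base64.b64encode("2".encode("UTF-8")).decode("UTF-8")  # 'Mg=='
index = int(base64.b64decode(cursor).decode("UTF-8"))           # 2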
def mutate_and_get_payload(cls, root, info, owner, labbook_name, section, directory,
                           client_mutation_id=None):
    username = get_logged_in_username()
    lb = InventoryManager().load_labbook(username, owner, labbook_name,
                                         author=get_logged_in_author())
    with lb.lock():
        FileOperations.makedir(lb, os.path.join(section, directory),
                               create_activity_record=True)

    # Prime dataloader with labbook you already loaded
    dataloader = LabBookLoader()
    dataloader.prime(f"{owner}&{labbook_name}&{lb.name}", lb)

    # Create data to populate edge
    file_info = FileOperations.get_file_info(lb, section, directory)
    create_data = {'owner': owner,
                   'name': labbook_name,
                   'section': section,
                   'key': file_info['key'],
                   '_file_info': file_info}

    # TODO: Fix cursor implementation, this currently doesn't make sense
    cursor = base64.b64encode(f"{0}".encode('utf-8'))
    return MakeLabbookDirectory(
        new_labbook_file_edge=LabbookFileConnection.Edge(
            node=LabbookFile(**create_data),
            cursor=cursor))
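# A minimal sketch (not the source LabBookLoader) of the prime/load caching idea
# used above: priming seeds the cache with an already-loaded value so a later
# load() for the same key skips the refetch.
class LoaderSketch:
    def __init__(self, fetch):
        self._fetch = fetch
        self._cache = {}

    def prime(self, key, value):
        # Seed the cache without overwriting an existing entry
        self._cache.setdefault(key, value)

    def load(self, key):
        if key not in self._cache:
            self._cache[key] = self._fetch(key)
        return self._cache[key]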
def mutate_and_get_payload(cls, root, info, owner, labbook_name, repository,
                           component_id, revision, client_mutation_id=None):
    username = get_logged_in_username()

    # Load LabBook instance
    lb = LabBook(author=get_logged_in_author())
    lb.from_name(username, owner, labbook_name)

    # Create Component Manager
    cm = ComponentManager(lb)
    cm.add_component("custom", repository, component_id, revision, force=True)

    # TODO: get cursor by checking how many packages are already installed
    new_edge = CustomComponentConnection.Edge(node=CustomComponent(repository=repository,
                                                                   component_id=component_id,
                                                                   revision=revision),
                                              cursor=0)
    return AddCustomComponent(new_custom_component_edge=new_edge)
def resolve_is_mergeable(self, info):
    lb = InventoryManager().load_labbook(get_logged_in_username(),
                                         self.owner, self.name)
    bm = BranchManager(lb)
    mergeable = self.branch_name in bm.branches_local \
        and self.branch_name != bm.active_branch
    return mergeable
def mutate_and_get_payload(cls, root, info, owner, labbook_name, filename, mount_path,
                           client_mutation_id=None):
    # Expand a leading "~" to the in-container home directory. Only the leading
    # character is substituted, so a "~" elsewhere in the path is left alone.
    if len(mount_path) > 0 and mount_path[0] == '~':
        mount_path = f'/home/giguser{mount_path[1:]}'

    if not cls._is_target_valid(mount_path):
        raise ValueError(f"Mount path {mount_path} is not a valid path")

    username = get_logged_in_username()
    lb = InventoryManager().load_labbook(username, owner, labbook_name,
                                         author=get_logged_in_author())
    with lb.lock():
        secstore = SecretStore(lb, username)
        secstore[filename] = mount_path
        cls._record_insert_activity(secstore, filename, lb, mount_path)

    env = Environment(owner=owner, name=lb.name)
    return InsertSecretsEntry(environment=env)
def helper_resolve_package_dependencies(labbook, kwargs):
    """Helper to resolve the list of package dependencies"""
    cm = ComponentManager(labbook)
    edges = cm.get_component_list("package_manager")

    if edges:
        cursors = [base64.b64encode("{}".format(cnt).encode("UTF-8")).decode("UTF-8")
                   for cnt, x in enumerate(edges)]

        # Process slicing and cursor args
        lbc = ListBasedConnection(edges, cursors, kwargs)
        lbc.apply()

        # Create version dataloader
        keys = [f"{k['manager']}&{k['package']}" for k in lbc.edges]
        vd = PackageLatestVersionLoader(keys, labbook, get_logged_in_username())

        # Build the edge objects
        edge_objs = []
        for edge, cursor in zip(lbc.edges, lbc.cursors):
            edge_objs.append(PackageComponentConnection.Edge(
                node=PackageComponent(_version_dataloader=vd,
                                      manager=edge['manager'],
                                      package=edge['package'],
                                      version=edge['version'],
                                      from_base=edge['from_base'],
                                      is_valid=True,
                                      schema=edge['schema']),
                cursor=cursor))
        return PackageComponentConnection(edges=edge_objs, page_info=lbc.page_info)
    else:
        return PackageComponentConnection(edges=[],
                                          page_info=graphene.relay.PageInfo(has_next_page=False,
                                                                            has_previous_page=False))
def mutate_and_get_payload(cls, root, info, owner, labbook_name, branch_name,
                           revision=None, description=None, client_mutation_id=None):
    username = get_logged_in_username()
    lb = InventoryManager().load_labbook(username, owner, labbook_name,
                                         author=get_logged_in_author())
    with lb.lock():
        bm = BranchManager(lb, username=username)
        full_branch_title = bm.create_branch(title=branch_name, revision=revision)
        logger.info(f"In {str(lb)} created new experimental feature branch: "
                    f"{full_branch_title}")

        if description:
            cls._update_branch_description(lb, description)

    return CreateExperimentalBranch(
        Labbook(id="{}&{}".format(owner, labbook_name),
                name=labbook_name, owner=owner))
def _get_dataset_file_info(self, dataset) -> dict:
    """Helper method to iterate over the manifest and get file info for the overview page

    Returns:
        dict: summary info with the file count, total size, and file type distribution
    """
    m = Manifest(dataset, get_logged_in_username())

    count = 0
    total_bytes = 0
    file_type_distribution: OrderedDict = OrderedDict()
    for key in m.manifest:
        item = m.manifest[key]
        if key[-1] == '/':
            # Skip directories
            continue

        filename = os.path.basename(key)
        if filename[0] == '.':
            # Skip hidden files
            continue

        if '.' not in filename:
            # Skip files without an extension
            continue

        # Count file type distribution
        _, ext = os.path.splitext(filename)
        if ext:
            file_type = ext
            if file_type in file_type_distribution:
                file_type_distribution[file_type] += 1
            else:
                file_type_distribution[file_type] = 1

        # Count total file size
        total_bytes += int(item['b'])

        # Count files
        count += 1

    # Format the output for file type distribution, most common type first
    formatted_file_type_info: List[str] = list()
    file_type_distribution = OrderedDict(sorted(file_type_distribution.items(),
                                                key=itemgetter(1), reverse=True))
    for file_type in file_type_distribution:
        percentage = float(file_type_distribution[file_type]) / float(count)
        formatted_file_type_info.append(f"{percentage:.2f}|{file_type}")

    self._dataset_file_info = {'num_files': count,
                               'total_bytes': total_bytes,
                               'local_bytes': count,
                               'file_type_distribution': formatted_file_type_info}

    return self._dataset_file_info
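# Worked example (illustrative) of the "fraction|extension" format built above:
# with 2 ".csv" files and 1 ".txt" file out of 3 total, the output is
# ["0.67|.csv", "0.33|.txt"].
counts = {'.csv': 2, '.txt': 1}
total = sum(counts.values())
formatted = [f"{n / total:.2f}|{ext}" for ext, n in counts.items()]  # ['0.67|.csv', '0.33|.txt']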
def mutate_and_get_payload(cls, root, info, owner, labbook_name, section, key,
                           description=None, is_dir=False, client_mutation_id=None):
    username = get_logged_in_username()
    lb = InventoryManager().load_labbook(username, owner, labbook_name,
                                         author=get_logged_in_author())

    # Add Favorite
    if is_dir:
        # Make sure trailing slashes are always present when favoriting a dir
        if key[-1] != "/":
            key = f"{key}/"

    with lb.lock():
        new_favorite = lb.create_favorite(section, key,
                                          description=description,
                                          is_dir=is_dir)

    # Create data to populate edge
    create_data = {"id": f"{owner}&{labbook_name}&{section}&{key}",
                   "owner": owner,
                   "section": section,
                   "name": labbook_name,
                   "key": key,
                   "index": new_favorite['index'],
                   "_favorite_data": new_favorite}

    # Create cursor
    cursor = base64.b64encode(f"{str(new_favorite['index'])}".encode('utf-8'))

    return AddLabbookFavorite(
        new_favorite_edge=LabbookFavoriteConnection.Edge(node=LabbookFavorite(**create_data),
                                                         cursor=cursor))
def mutate_and_get_payload(cls, root, info, owner, dataset_name, description,
                           client_mutation_id=None):
    username = get_logged_in_username()
    ds = InventoryManager().load_dataset(username, owner, dataset_name,
                                         author=get_logged_in_author())
    ds.description = description
    with ds.lock():
        ds.git.add(os.path.join(ds.root_dir, '.gigantum/gigantum.yaml'))
        commit = ds.git.commit('Updating description')

        adr = ActivityDetailRecord(ActivityDetailType.LABBOOK, show=False)
        adr.add_value('text/plain', f"Updated Dataset description: {description}")
        ar = ActivityRecord(ActivityType.LABBOOK,
                            message="Updated Dataset description",
                            linked_commit=commit.hexsha,
                            tags=["dataset"],
                            show=False)
        ar.add_detail_object(adr)
        ars = ActivityStore(ds)
        ars.create_activity_record(ar)

    return SetDatasetDescription(updated_dataset=Dataset(owner=owner, name=dataset_name))
def _configure_git(cls, lb, info) -> GitLabManager:
    # Extract valid Bearer token
    # TODO - This code is duplicated all over the place, must be refactored.
    token = None
    if hasattr(info.context.headers, 'environ'):
        if "HTTP_AUTHORIZATION" in info.context.headers.environ:
            token = parse_token(info.context.headers.environ["HTTP_AUTHORIZATION"])
    if not token:
        raise ValueError("Authorization header not provided. "
                         "Must have a valid session to query for collaborators")

    default_remote = lb.client_config.config['git']['default_remote']
    admin_service = None
    for remote in lb.client_config.config['git']['remotes']:
        if default_remote == remote:
            admin_service = lb.client_config.config['git']['remotes'][remote]['admin_service']
            break
    if not admin_service:
        raise ValueError('admin_service could not be found')

    # Configure git creds
    mgr = GitLabManager(default_remote, admin_service, access_token=token)
    mgr.configure_git_credentials(default_remote, get_logged_in_username())
    return mgr
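# A minimal sketch (an assumption, not the source implementation) of what
# parse_token is expected to do with the raw header value: pull the bare token
# out of "Bearer <token>", failing loudly on anything else.
def parse_token_sketch(header_value: str) -> str:
    parts = header_value.split()
    if len(parts) != 2 or parts[0] != "Bearer":
        raise ValueError("Could not parse Bearer token from Authorization header")
    return parts[1]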
def mutate_and_get_payload(cls, root, info, name, description, repository, base_id,
                           revision, is_untracked=False, client_mutation_id=None):
    username = get_logged_in_username()
    inv_manager = InventoryManager()
    if is_untracked:
        lb = inv_manager.create_labbook_disabled_lfs(username=username,
                                                     owner=username,
                                                     labbook_name=name,
                                                     description=description,
                                                     author=get_logged_in_author())
    else:
        lb = inv_manager.create_labbook(username=username,
                                        owner=username,
                                        labbook_name=name,
                                        description=description,
                                        author=get_logged_in_author())

    if is_untracked:
        FileOperations.set_untracked(lb, 'input')
        FileOperations.set_untracked(lb, 'output')
        input_set = FileOperations.is_set_untracked(lb, 'input')
        output_set = FileOperations.is_set_untracked(lb, 'output')
        if not (input_set and output_set):
            raise ValueError(f'{str(lb)} untracking for input/output in malformed state')
        if not lb.is_repo_clean:
            raise ValueError(f'{str(lb)} should have clean Git state after setting for untracked')

    adr = ActivityDetailRecord(ActivityDetailType.LABBOOK, show=False, importance=0)
    adr.add_value('text/plain', f"Created new LabBook: {username}/{name}")

    # Create activity record
    ar = ActivityRecord(ActivityType.LABBOOK,
                        message=f"Created new LabBook: {username}/{name}",
                        show=True,
                        importance=255,
                        linked_commit=lb.git.commit_hash)
    ar.add_detail_object(adr)

    store = ActivityStore(lb)
    store.create_activity_record(ar)

    cm = ComponentManager(lb)
    cm.add_base(repository, base_id, revision)

    return CreateLabbook(labbook=Labbook(owner=username, name=lb.name))
def resolve_commits_ahead(self, info):
    lb = InventoryManager().load_labbook(get_logged_in_username(),
                                         self.owner, self.name)
    self._configure_git(lb, info)
    bm = BranchManager(lb)
    return bm.get_commits_ahead(branch_name=self.branch_name)
def mutate_and_get_payload(cls, root, info, owner, labbook_name, client_mutation_id=None):
    username = get_logged_in_username()
    lb = InventoryManager().load_labbook(username, owner, labbook_name)
    d = Dispatcher()
    lb_jobs = d.get_jobs_for_labbook(lb.key)

    # Find any in-progress image builds for this labbook. Named build_jobs to
    # avoid shadowing the `jobs` module used elsewhere in this file.
    build_jobs = [j for j in lb_jobs
                  if j.meta.get('method') == 'build_image'
                  and j.status == 'started']

    if len(build_jobs) == 1:
        d.abort_task(build_jobs[0].job_key)
        ContainerOperations.delete_image(lb, username=username)
        return CancelBuild(build_stopped=True, message="Stopped build")
    elif len(build_jobs) == 0:
        logger.warning(f"No build_image tasks found for {str(lb)}")
        return CancelBuild(build_stopped=False, message="No build task found")
    else:
        logger.warning(f"Multiple build jobs found for {str(lb)}")
        return CancelBuild(build_stopped=False, message="Multiple builds found")
def mutate_and_get_payload(cls, root, info, owner, labbook_name, no_cache=False,
                           client_mutation_id=None):
    username = get_logged_in_username()

    if BuildImage.get_container_status(labbook_name, owner, username):
        raise ValueError(f'Cannot build image for running container {owner}/{labbook_name}')

    lb = InventoryManager().load_labbook(username, owner, labbook_name,
                                         author=get_logged_in_author())

    # Generate Dockerfile
    # TODO BVB - Move to build_image ??
    ib = ImageBuilder(lb)
    ib.assemble_dockerfile(write=True)

    # Kick off building in a background thread
    d = Dispatcher()
    build_kwargs = {
        'path': lb.root_dir,
        'username': username,
        'nocache': no_cache
    }
    metadata = {'labbook': lb.key, 'method': 'build_image'}
    res = d.dispatch_task(jobs.build_labbook_image, kwargs=build_kwargs, metadata=metadata)

    return BuildImage(environment=Environment(owner=owner, name=labbook_name),
                      background_job_key=res.key_str)
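# A hedged sketch (an assumption, not the source job) of the core of what a
# build_labbook_image job would do with docker-py, honoring the nocache flag
# passed through build_kwargs above:
import docker

def build_image_sketch(path: str, tag: str, nocache: bool = False):
    client = docker.from_env()
    # Builds from the Dockerfile at `path`; returns (Image, build log generator)
    image, logs = client.images.build(path=path, tag=tag, nocache=nocache)
    return image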
def mutate_and_get_payload(cls, root, info, owner, labbook_name, client_mutation_id=None):
    username = get_logged_in_username()
    logger.info(f'Exporting LabBook: {username}/{owner}/{labbook_name}')

    working_directory = Configuration().config['git']['working_directory']
    inferred_lb_directory = os.path.join(working_directory, username, owner,
                                         'labbooks', labbook_name)
    lb = LabBook(author=get_logged_in_author())
    lb.from_directory(inferred_lb_directory)

    job_metadata = {'method': 'export_labbook_as_zip', 'labbook': lb.key}
    job_kwargs = {
        'labbook_path': lb.root_dir,
        'lb_export_directory': os.path.join(working_directory, 'export')
    }
    dispatcher = Dispatcher()
    job_key = dispatcher.dispatch_task(jobs.export_labbook_as_zip,
                                       kwargs=job_kwargs,
                                       metadata=job_metadata)
    logger.info(f"Exporting LabBook {lb.root_dir} in background job with key {job_key.key_str}")

    return ExportLabbook(job_key=job_key.key_str)
def mutate_and_get_payload(cls, root, info, owner, dataset_name, transaction_id,
                           cancel=False, rollback=False, client_mutation_id=None):
    username = get_logged_in_username()
    ds = InventoryManager().load_dataset(username, owner, dataset_name,
                                         author=get_logged_in_author())
    with ds.lock():
        if cancel and rollback:
            logger.warning(f"Cancelled tx {transaction_id}, doing git reset")
            # TODO: Add ability to reset
        else:
            logger.info(f"Done batch upload {transaction_id}, cancelled={cancel}")
            if cancel:
                logger.warning("Sweeping aborted batch upload.")

            m = f"Cancelled upload `{transaction_id}`. " if cancel else ''

            # Sweep up and process all files added during upload
            manifest = Manifest(ds, username)
            manifest.sweep_all_changes(upload=True, extra_msg=m)

    return CompleteDatasetUploadTransaction(success=True)
def mutate_and_get_payload(cls, root, info, owner, labbook_name, description_content,
                           client_mutation_id=None):
    username = get_logged_in_username()
    lb = LabBook(author=get_logged_in_author())
    lb.from_name(username, owner, labbook_name)
    lb.description = description_content
    with lb.lock_labbook():
        lb.git.add(os.path.join(lb.root_dir, '.gigantum/labbook.yaml'))
        commit = lb.git.commit('Updating description')

        # Create detail record
        adr = ActivityDetailRecord(ActivityDetailType.LABBOOK, show=False)
        adr.add_value('text/plain', "Updated description of LabBook")

        # Create activity record
        ar = ActivityRecord(ActivityType.LABBOOK,
                            message="Updated description of LabBook",
                            linked_commit=commit.hexsha,
                            tags=["labbook"],
                            show=False)
        ar.add_detail_object(adr)

        # Store
        ars = ActivityStore(lb)
        ars.create_activity_record(ar)

    return SetLabbookDescription(success=True)
def mutate_and_process_upload(cls, info, upload_file_path, upload_filename, **kwargs):
    if not upload_file_path:
        logger.error('No file uploaded')
        raise ValueError('No file uploaded')

    username = get_logged_in_username()
    owner = kwargs.get('owner')
    dataset_name = kwargs.get('dataset_name')
    file_path = kwargs.get('file_path')

    try:
        ds = InventoryManager().load_dataset(username, owner, dataset_name,
                                             author=get_logged_in_author())
        with ds.lock():
            # Note: os.path.isabs (not abspath, which is always truthy) is the
            # correct test for the error this message describes
            if not os.path.isabs(upload_file_path):
                raise ValueError(f"Source file `{upload_file_path}` not an absolute path")
            if not os.path.isfile(upload_file_path):
                raise ValueError(f"Source file does not exist at `{upload_file_path}`")

            manifest = Manifest(ds, username)
            full_dst = manifest.get_abs_path(file_path)

            # If file (hard link) already exists, remove it first so you don't
            # write to all files with the same content
            if os.path.isfile(full_dst):
                os.remove(full_dst)

            full_dst_base = os.path.dirname(full_dst)
            if not os.path.isdir(full_dst_base):
                pathlib.Path(full_dst_base).mkdir(parents=True, exist_ok=True)

            shutil.move(upload_file_path, full_dst)
            file_info = manifest.gen_file_info(file_path)
    finally:
        try:
            logger.debug(f"Removing temp file {upload_file_path}")
            os.remove(upload_file_path)
        except FileNotFoundError:
            pass

    # Create data to populate edge
    create_data = {'owner': owner,
                   'name': dataset_name,
                   'key': file_info['key'],
                   '_file_info': file_info}

    # TODO: Fix cursor implementation. This currently doesn't make sense when adding edges
    cursor = base64.b64encode(f"{0}".encode('utf-8'))
    return AddDatasetFile(new_dataset_file_edge=DatasetFileConnection.Edge(
        node=DatasetFile(**create_data), cursor=cursor))
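# Illustrative only: why the hard link is removed before writing. Writing
# through an existing hard link mutates the shared inode, so every linked path
# sees the change; unlinking first and moving a fresh file in breaks the link
# and leaves the other paths untouched.
import os, tempfile

d = tempfile.mkdtemp()
a, b = os.path.join(d, 'a'), os.path.join(d, 'b')
with open(a, 'w') as f:
    f.write('original')
os.link(a, b)                       # a and b now share one inode

with open(b, 'w') as f:             # writing through b also changes a
    f.write('clobbered')
assert open(a).read() == 'clobbered'

os.remove(b)                        # break the link first...
with open(b, 'w') as f:             # ...then write a fresh, independent file
    f.write('independent')
assert open(a).read() == 'clobbered'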
def mutate_and_get_payload(cls, root, info, owner, labbook_name, description_content,
                           client_mutation_id=None):
    username = get_logged_in_username()
    lb = InventoryManager().load_labbook(username, owner, labbook_name,
                                         author=get_logged_in_author())
    lb.description = description_content
    with lb.lock():
        lb.git.add(lb.config_path)
        commit = lb.git.commit('Updating description')

        adr = ActivityDetailRecord(ActivityDetailType.LABBOOK, show=False)
        adr.add_value('text/plain', "Updated description of Project")
        ar = ActivityRecord(ActivityType.LABBOOK,
                            message="Updated description of Project",
                            linked_commit=commit.hexsha,
                            tags=["labbook"],
                            show=False)
        ar.add_detail_object(adr)
        ars = ActivityStore(lb)
        ars.create_activity_record(ar)

    return SetLabbookDescription(success=True)
def mutate_and_get_payload(cls, root, info, owner, labbook_name, client_mutation_id=None):
    # Load LabBook
    username = get_logged_in_username()
    working_directory = Configuration().config['git']['working_directory']
    inferred_lb_directory = os.path.join(working_directory, username, owner,
                                         'labbooks', labbook_name)
    lb = LabBook(author=get_logged_in_author())
    lb.from_directory(inferred_lb_directory)

    # Extract valid Bearer token
    if "HTTP_AUTHORIZATION" in info.context.headers.environ:
        token = parse_token(info.context.headers.environ["HTTP_AUTHORIZATION"])
    else:
        raise ValueError("Authorization header not provided. "
                         "Must have a valid session to query for collaborators")

    # BVB -- Should this defer to `sync` if Labbook's remote is already set?
    # Otherwise, it will throw an exception, which may still be ok.
    wf = GitWorkflow(labbook=lb)
    wf.publish(username=username, access_token=token)

    return PublishLabbook(success=True)
def mutate_and_get_payload(cls, root, info, owner, labbook_name, client_mutation_id=None):
    username = get_logged_in_username()
    lb = InventoryManager().load_labbook(username, owner, labbook_name,
                                         author=get_logged_in_author())

    migrated = False
    with lb.lock():
        t0 = time.time()
        workflow = LabbookWorkflow(lb)
        migrated = workflow.migrate()
        tf = time.time()

    if migrated:
        logger.info(f"Migrated {str(lb)} in {tf-t0:.2f}sec")
    else:
        logger.info(f"No migration needed for {str(lb)}")

    return MigrateLabbookSchema(
        Labbook(id=f"{owner}&{labbook_name}", name=labbook_name, owner=owner))
def mutate_and_get_payload(cls, root, info, owner, labbook_name, pull_only=False,
                           override_method="abort", client_mutation_id=None):
    # Load LabBook
    username = get_logged_in_username()
    lb = InventoryManager().load_labbook(username, owner, labbook_name,
                                         author=get_logged_in_author())

    # Extract valid Bearer token
    token = None
    if hasattr(info.context.headers, 'environ'):
        if "HTTP_AUTHORIZATION" in info.context.headers.environ:
            token = parse_token(info.context.headers.environ["HTTP_AUTHORIZATION"])
    if not token:
        raise ValueError("Authorization header not provided. "
                         "Must have a valid session to query for collaborators")

    default_remote = lb.client_config.config['git']['default_remote']
    admin_service = None
    for remote in lb.client_config.config['git']['remotes']:
        if default_remote == remote:
            admin_service = lb.client_config.config['git']['remotes'][remote]['admin_service']
            break
    if not admin_service:
        raise ValueError('admin_service could not be found')

    # Configure git creds
    mgr = GitLabManager(default_remote, admin_service, access_token=token)
    mgr.configure_git_credentials(default_remote, username)

    override = MergeOverride(override_method)
    job_metadata = {'method': 'sync_labbook', 'labbook': lb.key}
    job_kwargs = {
        'repository': lb,
        'pull_only': pull_only,
        'username': username,
        'override': override
    }
    dispatcher = Dispatcher()
    job_key = dispatcher.dispatch_task(jobs.sync_repository,
                                       kwargs=job_kwargs,
                                       metadata=job_metadata)
    logger.info(f"Syncing LabBook {lb.root_dir} in background job with key {job_key.key_str}")

    return SyncLabbook(job_key=job_key.key_str)