def mutate_and_get_payload(cls, root, info, owner, labbook_name, client_mutation_id=None):
    """Publish a LabBook to its default remote for the first time.

    Args:
        root: GraphQL root object (unused).
        info: GraphQL resolve info; its context headers carry the Bearer token.
        owner: Username (namespace) owning the LabBook.
        labbook_name: Name of the LabBook to publish.
        client_mutation_id: Relay mutation id (unused).

    Returns:
        PublishLabbook payload with success=True.

    Raises:
        ValueError: If no valid Authorization header is provided.
    """
    # Load LabBook from its inferred on-disk location
    username = get_logged_in_username()
    working_directory = Configuration().config['git']['working_directory']
    inferred_lb_directory = os.path.join(working_directory, username, owner,
                                         'labbooks', labbook_name)
    lb = LabBook(author=get_logged_in_author())
    lb.from_directory(inferred_lb_directory)

    # Extract valid Bearer token. Guard with hasattr so contexts without a
    # WSGI environ (e.g. test clients) raise the intended ValueError rather
    # than an AttributeError -- consistent with the sync mutation.
    token = None
    if hasattr(info.context.headers, 'environ'):
        if "HTTP_AUTHORIZATION" in info.context.headers.environ:
            token = parse_token(
                info.context.headers.environ["HTTP_AUTHORIZATION"])
    if not token:
        raise ValueError(
            "Authorization header not provided. Must have a valid session to query for collaborators"
        )

    # BVB -- Should this defer to `sync` if Labbook's remote is already set?
    # Otherwise, it will throw an exception, which may still be ok.
    wf = GitWorkflow(labbook=lb)
    wf.publish(username=username, access_token=token)
    return PublishLabbook(success=True)
def mutate_and_get_payload(cls, root, info, owner, labbook_name, client_mutation_id=None):
    """Kick off a background job that exports a LabBook as a zip archive.

    Returns an ExportLabbook payload carrying the dispatched job's key so the
    client can poll for completion.
    """
    username = get_logged_in_username()
    logger.info(f'Exporting LabBook: {username}/{owner}/{labbook_name}')

    # Resolve the LabBook's directory from the configured working directory
    work_dir = Configuration().config['git']['working_directory']
    lb_dir = os.path.join(work_dir, username, owner, 'labbooks', labbook_name)
    lb = LabBook(author=get_logged_in_author())
    lb.from_directory(lb_dir)

    # Dispatch the export as an async job; metadata lets the UI track it
    export_kwargs = {
        'labbook_path': lb.root_dir,
        'lb_export_directory': os.path.join(work_dir, 'export'),
    }
    export_metadata = {'method': 'export_labbook_as_zip', 'labbook': lb.key}
    job_key = Dispatcher().dispatch_task(jobs.export_labbook_as_zip,
                                         kwargs=export_kwargs,
                                         metadata=export_metadata)
    logger.info(
        f"Exporting LabBook {lb.root_dir} in background job with key {job_key.key_str}"
    )
    return ExportLabbook(job_key=job_key.key_str)
def mutate_and_process_upload(cls, info, owner, labbook_name, section, file_path, chunk_upload_params, transaction_id, client_mutation_id=None):
    """Finalize a chunked file upload by placing the assembled file in the LabBook.

    The temp file written by the chunk-upload machinery is always removed,
    whether or not placing it into the LabBook succeeds.

    Raises:
        ValueError: If no uploaded file is available on the class.
    """
    if not cls.upload_file_path:
        logger.error('No file uploaded')
        raise ValueError('No file uploaded')

    try:
        username = get_logged_in_username()
        work_dir = Configuration().config['git']['working_directory']
        lb_dir = os.path.join(work_dir, username, owner, 'labbooks',
                              labbook_name)
        lb = LabBook(author=get_logged_in_author())
        lb.from_directory(lb_dir)

        # Destination keeps the requested directory but uses the real filename
        dest_path = os.path.join(os.path.dirname(file_path), cls.filename)
        fops = FileOperations.put_file(labbook=lb,
                                       section=section,
                                       src_file=cls.upload_file_path,
                                       dst_path=dest_path,
                                       txid=transaction_id)
    finally:
        # Always clean up the temp upload, even on failure
        try:
            logger.debug(f"Removing temp file {cls.upload_file_path}")
            os.remove(cls.upload_file_path)
        except FileNotFoundError:
            pass

    # Create data to populate edge
    create_data = {
        'owner': owner,
        'name': labbook_name,
        'section': section,
        'key': fops['key'],
        '_file_info': fops,
    }

    # TODO: Fix cursor implementation..
    # this currently doesn't make sense when adding edges
    cursor = base64.b64encode("0".encode('utf-8'))
    edge = LabbookFileConnection.Edge(node=LabbookFile(**create_data),
                                      cursor=cursor)
    return AddLabbookFile(new_labbook_file_edge=edge)
def mutate_and_get_payload(cls, root, info, owner, labbook_name, force=False, client_mutation_id=None):
    """Sync a LabBook with its default remote and report the update count.

    Requires a valid Bearer token in the request headers; git credentials are
    configured against the configured default remote before syncing.

    Raises:
        ValueError: If no token is provided or no admin service is configured
            for the default remote.
    """
    # Load LabBook from its inferred on-disk location
    username = get_logged_in_username()
    work_dir = Configuration().config['git']['working_directory']
    lb_dir = os.path.join(work_dir, username, owner, 'labbooks', labbook_name)
    lb = LabBook(author=get_logged_in_author())
    lb.from_directory(lb_dir)

    # Extract valid Bearer token (guarded for contexts without a WSGI environ)
    token = None
    if hasattr(info.context.headers, 'environ'):
        auth = info.context.headers.environ.get("HTTP_AUTHORIZATION")
        if auth is not None:
            token = parse_token(auth)
    if not token:
        raise ValueError(
            "Authorization header not provided. Must have a valid session to query for collaborators"
        )

    # Look up the admin service configured for the default remote
    git_config = lb.labmanager_config.config['git']
    default_remote = git_config['default_remote']
    remote_config = git_config['remotes'].get(default_remote)
    admin_service = remote_config['admin_service'] if remote_config else None
    if not admin_service:
        raise ValueError('admin_service could not be found')

    # Configure git creds
    mgr = GitLabManager(default_remote, admin_service, access_token=token)
    mgr.configure_git_credentials(default_remote, username)

    cnt = GitWorkflow(labbook=lb).sync(username=username, force=force)

    # Create an updated graphne Labbook instance to return for convenience of Relay.
    updatedl = LabbookObject(owner=owner, name=labbook_name)
    return SyncLabbook(update_count=cnt, updated_labbook=updatedl)
def prior_mutate_and_get_payload(cls, root, info, owner, original_labbook_name, new_labbook_name, client_mutation_id=None):
    """Rename a LabBook and rebuild its Docker image under the new tag.

    NOTE!!! This is the code that was originally to rename.
    Temporarily, rename functionality is disabled.
    """
    # Load LabBook from its inferred on-disk location
    username = get_logged_in_username()
    work_dir = Configuration().config['git']['working_directory']
    lb_dir = os.path.join(work_dir, username, owner, 'labbooks',
                          original_labbook_name)
    lb = LabBook(author=get_logged_in_author())
    lb.from_directory(lb_dir)

    # Docker image tags before and after the rename
    old_tag = '{}-{}-{}'.format(username, owner, original_labbook_name)
    new_tag = '{}-{}-{}'.format(username, owner, new_labbook_name)

    # Rename LabBook on disk
    lb.rename(new_labbook_name)
    logger.info(
        f"Renamed LabBook from `{original_labbook_name}` to `{new_labbook_name}`"
    )

    # Build image with new name...should be fast and use the Docker cache
    docker_client = get_docker_client()
    ImageBuilder(lb.root_dir).build_image(docker_client=docker_client,
                                          image_tag=new_tag,
                                          username=username,
                                          background=True)

    # Delete old image if it had previously been built successfully
    try:
        docker_client.images.get(old_tag)
        docker_client.images.remove(old_tag)
    except ImageNotFound:
        logger.warning(
            f"During renaming, original image {old_tag} not found, removal skipped."
        )

    return RenameLabbook(success=True)
def mutate_and_get_payload(cls, root, info, owner, labbook_name, no_cache=False, client_mutation_id=None):
    """Build (or rebuild) a LabBook's Docker image in a background job.

    Refuses to build while the LabBook's container is running.

    Raises:
        ValueError: If the LabBook's container is currently running.
    """
    username = get_logged_in_username()

    # A running container would be orphaned by a rebuild -- disallow
    if BuildImage.get_container_status(labbook_name, owner, username):
        raise ValueError(
            f'Cannot build image for running container {owner}/{labbook_name}'
        )

    labbook_dir = os.path.expanduser(
        os.path.join(Configuration().config['git']['working_directory'],
                     username, owner, 'labbooks', labbook_name))
    lb = LabBook(author=get_logged_in_author())
    lb.from_directory(labbook_dir)

    # Generate Dockerfile
    ImageBuilder(lb).assemble_dockerfile(write=True)

    # Kick off building in a background thread
    task_kwargs = {
        'path': labbook_dir,
        'username': username,
        'nocache': no_cache,
    }
    task_metadata = {'labbook': lb.key, 'method': 'build_image'}
    res = Dispatcher().dispatch_task(jobs.build_labbook_image,
                                     kwargs=task_kwargs,
                                     metadata=task_metadata)

    return BuildImage(environment=Environment(owner=owner, name=labbook_name),
                      background_job_key=res.key_str)
def mutate_and_get_payload(cls, root, info, owner, labbook_name, section, file_path, is_directory=False, client_mutation_id=None):
    """Delete a file (or directory) from a section of a LabBook."""
    username = get_logged_in_username()
    work_dir = Configuration().config['git']['working_directory']
    lb_dir = os.path.join(work_dir, username, owner, 'labbooks', labbook_name)

    lb = LabBook(author=get_logged_in_author())
    lb.from_directory(lb_dir)
    lb.delete_file(section=section,
                   relative_path=file_path,
                   directory=is_directory)

    return DeleteLabbookFile(success=True)
def mutate_and_get_payload(cls, root, info, owner, labbook_name, transaction_id, cancel=False, rollback=False, client_mutation_id=None):
    """Close out a batch-upload transaction, optionally cancelling/rolling back."""
    username = get_logged_in_username()
    work_dir = Configuration().config['git']['working_directory']
    lb_dir = os.path.join(work_dir, username, owner, 'labbooks', labbook_name)

    lb = LabBook(author=get_logged_in_author())
    lb.from_directory(lb_dir)
    FileOperations.complete_batch(lb,
                                  transaction_id,
                                  cancel=cancel,
                                  rollback=rollback)

    return CompleteBatchUploadTransaction(success=True)
def mutate_and_get_payload(cls, root, info, owner, labbook_name, section, directory, client_mutation_id=None):
    """Create a directory in a LabBook section and return its file edge."""
    username = get_logged_in_username()
    work_dir = Configuration().config['git']['working_directory']
    lb_dir = os.path.join(work_dir, username, owner, 'labbooks', labbook_name)

    lb = LabBook(author=get_logged_in_author())
    lb.from_directory(lb_dir)
    lb.makedir(os.path.join(section, directory), create_activity_record=True)
    logger.info(f"Made new directory in `{directory}`")

    # Prime dataloader with labbook you already loaded
    LabBookLoader().prime(f"{owner}&{labbook_name}&{lb.name}", lb)

    # Create data to populate edge
    file_info = lb.get_file_info(section, directory)
    create_data = {
        'owner': owner,
        'name': labbook_name,
        'section': section,
        'key': file_info['key'],
        '_file_info': file_info,
    }

    # TODO: Fix cursor implementation, this currently doesn't make sense
    cursor = base64.b64encode("0".encode('utf-8'))
    edge = LabbookFileConnection.Edge(node=LabbookFile(**create_data),
                                      cursor=cursor)
    return MakeLabbookDirectory(new_labbook_file_edge=edge)
def mutate_and_get_payload(cls, root, info, owner, labbook_name, confirm, client_mutation_id=None):
    """Delete a LabBook from disk (and its Docker image) when confirmed.

    With confirm=False this is a dry run: nothing is deleted and success=False
    is returned.
    """
    username = get_logged_in_username()
    work_dir = Configuration().config['git']['working_directory']
    lb_dir = os.path.join(work_dir, username, owner, 'labbooks', labbook_name)
    lb = LabBook(author=get_logged_in_author())
    lb.from_directory(lb_dir)

    if not confirm:
        logger.info(f"Dry run in deleting {str(lb)} -- not deleted.")
        return DeleteLabbook(success=False)

    logger.warning(f"Deleting {str(lb)}...")
    # Best-effort stop of any running container; ignore OS-level failures
    try:
        lb, stopped = ContainerOperations.stop_container(labbook=lb,
                                                         username=username)
    except OSError:
        pass

    # The Docker image must be removed before the LabBook leaves disk
    lb, docker_removed = ContainerOperations.delete_image(labbook=lb,
                                                          username=username)
    if not docker_removed:
        raise ValueError(
            f'Cannot delete docker image for {str(lb)} - unable to delete LB from disk'
        )

    shutil.rmtree(lb.root_dir, ignore_errors=True)
    if os.path.exists(lb.root_dir):
        logger.error(
            f'Deleted {str(lb)} but root directory {lb.root_dir} still exists!'
        )
        return DeleteLabbook(success=False)
    return DeleteLabbook(success=True)
def test_add_file(self, mock_create_labbooks):
    """Test adding a new file to a labbook via chunked upload.

    Uploads a random ~9MB file in chunks through the addLabbookFile mutation,
    then completes the batch-upload transaction and verifies the repo state.
    """
    class DummyContext(object):
        # Minimal stand-in for the GraphQL request context: exposes the
        # uploaded chunk under the key the mutation expects.
        def __init__(self, file_handle):
            self.labbook_loader = None
            self.files = {'uploadChunk': file_handle}

    client = Client(mock_create_labbooks[3],
                    middleware=[LabBookLoaderMiddleware()])

    # Create file to upload
    test_file = os.path.join(tempfile.gettempdir(), "myValidFile.dat")
    est_size = 9000000
    # Narrow exception: a bare `except:` would also swallow
    # KeyboardInterrupt/SystemExit; only a pre-existing file is expected.
    try:
        os.remove(test_file)
    except FileNotFoundError:
        pass
    with open(test_file, 'wb') as tf:
        tf.write(os.urandom(est_size))
        new_file_size = os.path.getsize(tf.name)

    # Get upload params
    chunk_size = 4194000
    file_info = os.stat(test_file)
    file_size = int(file_info.st_size / 1000)
    total_chunks = int(math.ceil(file_info.st_size / chunk_size))

    target_file = os.path.join(mock_create_labbooks[1], 'default', 'default',
                               'labbooks', 'labbook1', 'code', 'newdir',
                               "myValidFile.dat")
    lb = LabBook(mock_create_labbooks[0])
    lb.from_directory(
        os.path.join(mock_create_labbooks[1], 'default', 'default',
                     'labbooks', 'labbook1'))
    lb.makedir('code/newdir', create_activity_record=True)

    txid = "000-unitest-transaction"
    with open(test_file, 'rb') as tf:
        # Check for file to exist (shouldn't yet)
        assert os.path.exists(target_file) is False

        for chunk_index in range(total_chunks):
            # Upload a chunk
            chunk = io.BytesIO()
            chunk.write(tf.read(chunk_size))
            chunk.seek(0)
            file = FileStorage(chunk)

            query = f"""
            mutation addLabbookFile{{
              addLabbookFile(input:{{owner:"default",
                labbookName: "labbook1",
                section: "code",
                filePath: "newdir/myValidFile.dat",
                transactionId: "{txid}",
                chunkUploadParams:{{
                  uploadId: "fdsfdsfdsfdfs",
                  chunkSize: {chunk_size},
                  totalChunks: {total_chunks},
                  chunkIndex: {chunk_index},
                  fileSizeKb: {file_size},
                  filename: "{os.path.basename(test_file)}"
                }}
              }}) {{
                newLabbookFileEdge {{
                  node{{
                    id
                    key
                    isDir
                    size
                  }}
                }}
              }}
            }}
            """
            r = client.execute(query, context_value=DummyContext(file))
            assert 'errors' not in r

        # So, these will only be populated once the last chunk is uploaded.
        # Will be None otherwise.
        node = r['data']['addLabbookFile']['newLabbookFileEdge']['node']
        assert node['isDir'] is False
        assert node['key'] == 'newdir/myValidFile.dat'
        assert node['size'] == f"{new_file_size}"

    # When done uploading, file should exist in the labbook
    assert os.path.exists(target_file)
    assert os.path.isfile(target_file)

    complete_query = f"""
    mutation completeQuery {{
      completeBatchUploadTransaction(input: {{
        owner: "default",
        labbookName: "labbook1",
        transactionId: "{txid}"
      }}) {{
        success
      }}
    }}
    """
    r = client.execute(complete_query, context_value=DummyContext(file))
    assert 'errors' not in r
    assert lb.is_repo_clean
    assert 'Uploaded new file' in lb.git.log()[0]['message']
def test_add_file_fail_due_to_git_ignore(self, mock_create_labbooks):
    """Test that uploading a git-ignored file (.DS_Store) is rejected.

    The addLabbookFile mutation must return an error for ignored patterns,
    and no file may be written into the labbook.
    """
    class DummyContext(object):
        # Minimal stand-in for the GraphQL request context: exposes the
        # uploaded chunk under the key the mutation expects.
        def __init__(self, file_handle):
            self.labbook_loader = None
            self.files = {'uploadChunk': file_handle}

    client = Client(mock_create_labbooks[3],
                    middleware=[LabBookLoaderMiddleware()])

    new_file_size = 9000000
    # Create file to upload
    test_file = os.path.join(tempfile.gettempdir(), ".DS_Store")
    with open(test_file, 'wb') as tf:
        tf.write(os.urandom(new_file_size))

    # Get upload params
    chunk_size = 4194000
    file_info = os.stat(test_file)
    file_size = int(file_info.st_size / 1000)
    total_chunks = int(math.ceil(file_info.st_size / chunk_size))

    target_file = os.path.join(mock_create_labbooks[1], 'default', 'default',
                               'labbooks', 'labbook1', 'code', 'newdir',
                               '.DS_Store')
    # Narrow exception: a bare `except:` would also swallow
    # KeyboardInterrupt/SystemExit; only a missing file is expected here.
    try:
        os.remove(target_file)
    except FileNotFoundError:
        pass

    lb = LabBook(mock_create_labbooks[0])
    lb.from_directory(
        os.path.join(mock_create_labbooks[1], 'default', 'default',
                     'labbooks', 'labbook1'))
    lb.makedir('code/newdir', create_activity_record=True)

    with open(test_file, 'rb') as tf:
        # Check for file to exist (shouldn't yet)
        assert os.path.exists(target_file) is False

        for chunk_index in range(total_chunks):
            # Upload a chunk
            chunk = io.BytesIO()
            chunk.write(tf.read(chunk_size))
            chunk.seek(0)
            file = FileStorage(chunk)

            query = f"""
            mutation addLabbookFile{{
              addLabbookFile(input:{{owner:"default",
                labbookName: "labbook1",
                section: "code",
                filePath: "newdir/.DS_Store",
                transactionId: "111-unittest-tx",
                chunkUploadParams:{{
                  uploadId: "jfdjfdjdisdjwdoijwlkfjd",
                  chunkSize: {chunk_size},
                  totalChunks: {total_chunks},
                  chunkIndex: {chunk_index},
                  fileSizeKb: {file_size},
                  filename: "{os.path.basename(test_file)}"
                }}
              }}) {{
                newLabbookFileEdge {{
                  node{{
                    id
                    key
                    isDir
                    size
                  }}
                }}
              }}
            }}
            """
            r = client.execute(query, context_value=DummyContext(file))

    # This must be outside of the chunk upload loop
    pprint.pprint(r)
    assert 'errors' in r
    assert len(r['errors']) == 1
    assert 'matches ignored pattern' in r['errors'][0]['message']

    # When done uploading, file should exist in the labbook
    assert os.path.isfile(target_file) is False
    assert os.path.exists(target_file) is False