def mutate_and_get_payload(cls, root, info, owner, labbook_name, branch_name, client_mutation_id=None):
    """Switch an existing LabBook's working copy to the named branch.

    Args:
        root: GraphQL root object (unused)
        info: GraphQL resolve info (unused)
        owner(str): Username of the LabBook owner
        labbook_name(str): Name of the LabBook to operate on
        branch_name(str): Name of the branch to check out
        client_mutation_id: Relay client mutation id (unused)

    Returns:
        CheckoutBranch: payload wrapping the checked-out LabBook node
    """
    logged_in_user = get_logged_in_username()

    # Load the existing LabBook from disk for the logged-in user
    lb = LabBook(author=get_logged_in_author())
    lb.from_name(logged_in_user, owner, labbook_name)

    # Switch the working copy to the requested branch
    lb.checkout_branch(branch_name)

    return CheckoutBranch(labbook=Labbook(owner=owner, name=labbook_name))
def mutate_and_get_payload(cls, root, info, owner, labbook_name, branch_name, client_mutation_id=None):
    """Create a new branch in an existing LabBook and switch to it.

    Args:
        root: GraphQL root object (unused)
        info: GraphQL resolve info (unused)
        owner(str): Username of the LabBook owner
        labbook_name(str): Name of the LabBook to operate on
        branch_name(str): Name of the branch to create
        client_mutation_id: Relay client mutation id (unused)

    Returns:
        CreateBranch: payload wrapping a LabbookRef for the new branch

    Raises:
        ValueError: If the LabBook is not on the new branch after creation
    """
    logged_in_user = get_logged_in_username()

    # Load the existing LabBook from disk for the logged-in user
    lb = LabBook(author=get_logged_in_author())
    lb.from_name(logged_in_user, owner, labbook_name)

    # Create the branch and immediately check it out
    lb.checkout_branch(branch_name, new=True)

    # Sanity-check that the checkout actually landed on the new branch
    if lb.active_branch != branch_name:
        raise ValueError(
            f"Create branch failed, could not switch to new branch {branch_name}"
        )

    # Build a LabbookRef node pointing at the freshly created branch
    ref_kwargs = {
        "owner": owner,
        "name": labbook_name,
        "prefix": None,
        "branch": branch_name,
    }
    return CreateBranch(branch=LabbookRef(**ref_kwargs))
def test_import_remote_labbook_from_same_user(self, remote_labbook_repo, fixture_working_dir):
    """Import a labbook from a remote URL owned by the same user and verify
    the active branch, remote-branch tracking, clone location, and default remote."""
    # Create a labbook by the "default" user
    # TODO: enable LFS when integration tests support it
    conf_file, working_dir = _create_temp_work_dir(lfs_enabled=False)
    lb = LabBook(conf_file)
    labbook_dir = lb.new(username="******", name="default-owned-repo-lb",
                         description="my first labbook",
                         owner={"username": "******"})
    lb.checkout_branch("gm.workspace")

    # Mock the request context so a fake authorization header is present
    builder = EnvironBuilder(path='/labbook', method='POST',
                             headers={'Authorization': 'Bearer AJDFHASD'})
    req = Request(environ=builder.get_environ())

    query = f"""
mutation importFromRemote {{
  importRemoteLabbook(
    input: {{
      owner: "default",
      labbookName: "default-owned-repo-lb",
      remoteUrl: "{labbook_dir}"
    }}) {{
    activeBranch
  }}
}}
"""
    r = fixture_working_dir[2].execute(query, context_value=req)
    # We might not always want to use master as the default branch, but keep it here for now.
    assert r['data']['importRemoteLabbook']['activeBranch'] == 'gm.workspace-default'

    # When importing a labbook from a remote url, the default branch must be tracked.
    list_all_branches_q = f"""
{{
  labbook(name: "default-owned-repo-lb", owner: "default") {{
    branches {{
      edges {{
        node {{
          prefix
          refName
        }}
      }}
    }}
  }}
}}
"""
    r = fixture_working_dir[2].execute(list_all_branches_q, context_value=req)
    edges = r['data']['labbook']['branches']['edges']
    # origin/gm.workspace must appear in the branch list — that means it tracks.
    for node in (edge['node'] for edge in edges):
        if node['prefix'] == 'origin' and node['refName'] == 'gm.workspace':
            break
    else:
        assert False

    # Make sure the labbook cloned into the correct directory
    assert os.path.exists(os.path.join(fixture_working_dir[1], 'default', 'default',
                                       'labbooks', 'default-owned-repo-lb'))

    # Now do a quick test for default_remote
    get_default_remote_q = f"""
{{
  labbook(name: "default-owned-repo-lb", owner: "default") {{
    defaultRemote
  }}
}}
"""
    r = fixture_working_dir[2].execute(get_default_remote_q, context_value=req)
    assert r['data']['labbook']['defaultRemote'] == labbook_dir
    assert 'errors' not in r