Example No. 1
 def test_remove_secrets_entry(self, fixture_working_dir_env_repo_scoped):
     client = fixture_working_dir_env_repo_scoped[2]
     im = InventoryManager(fixture_working_dir_env_repo_scoped[0])
     lb = im.create_labbook("default", "default",
                            "unittest-mutation-remove-secret")
     SecretStore(lb, "default")['remove.key'] = '/mnt/nowhere'
     SecretStore(lb, "default")['absent.key'] = '/mnt/nowhere2'
     query = """
     mutation remove {
         removeSecretsEntry(input: {
             owner: "default",
             labbookName: "unittest-mutation-remove-secret",
             filename: "remove.key",
         }) {
             environment {
                 secretsFileMapping {
                     edges {
                         node {
                             filename
                             mountPath
                             isPresent
                         }
                     }
                 }
             }
         }
     }"""
     r = client.execute(query)
     assert 'errors' not in r
     n = r['data']['removeSecretsEntry']['environment'][
         'secretsFileMapping']['edges']
     assert len(n) == 1
     assert n[0]['node']['filename'] == 'absent.key'
     assert n[0]['node']['isPresent'] is False
Example No. 2
    def test_secrets_vault_query(self, fixture_working_dir_env_repo_scoped):
        client = fixture_working_dir_env_repo_scoped[2]
        im = InventoryManager(fixture_working_dir_env_repo_scoped[0])
        lb = im.create_labbook("default", "default", "unittest-create-secret")
        sec_store = SecretStore(lb, "default")
        container_dst = '/tmp/secrets1'

        sec_store['data1.key'] = container_dst
        sec_store['absent.key'] = container_dst

        with tempfile.TemporaryDirectory() as tdir:
            path = os.path.join(tdir, 'data1.key')
            f1 = open(path, 'w')
            f1.write('<<<keydata>>>')
            f1.close()
            sec_store.insert_file(f1.name)

        query = """
        {
            labbook(owner: "default", name: "unittest-create-secret") {
                environment {
                    secretsFileMapping {
                        edges {
                            node {
                                filename
                                mountPath
                                isPresent
                            }
                        }
                    }
                }
            }
        }
        """
        r = client.execute(query)
        pprint.pprint(r)
        assert 'errors' not in r

        # An absent file (whose contents still need to be uploaded) is
        # acknowledged, but reports False for isPresent
        assert r['data']['labbook']['environment']['secretsFileMapping'][
            'edges'][0]['node']['filename'] == 'absent.key'
        assert r['data']['labbook']['environment']['secretsFileMapping'][
            'edges'][0]['node']['isPresent'] is False
        assert r['data']['labbook']['environment']['secretsFileMapping'][
            'edges'][0]['node']['mountPath'] == container_dst

        # This file is in the registry AND isPresent
        assert r['data']['labbook']['environment']['secretsFileMapping'][
            'edges'][1]['node']['filename'] == 'data1.key'
        assert r['data']['labbook']['environment']['secretsFileMapping'][
            'edges'][1]['node']['isPresent'] is True
        assert r['data']['labbook']['environment']['secretsFileMapping'][
            'edges'][1]['node']['mountPath'] == container_dst
Example No. 3
    def helper_resolve_secrets_file_mapping(labbook, kwargs):
        secrets_store = SecretStore(labbook, get_logged_in_username())
        edges = secrets_store.secret_map.keys()

        if edges:
            cursors = [
                base64.b64encode(
                    "{}".format(cnt).encode("UTF-8")).decode("UTF-8")
                for cnt, x in enumerate(edges)
            ]

            # Process slicing and cursor args
            lbc = ListBasedConnection(edges, cursors, kwargs)
            lbc.apply()

            # Build SecretFileMapping edges for the current page
            edge_objs = []
            for edge, cursor in zip(lbc.edges, lbc.cursors):
                node_obj = SecretFileMapping(owner=labbook.owner,
                                             name=labbook.name,
                                             filename=edge,
                                             mount_path=secrets_store[edge])
                edge_objs.append(
                    SecretFileMappingConnection.Edge(node=node_obj,
                                                     cursor=cursor))
            return SecretFileMappingConnection(edges=edge_objs,
                                               page_info=lbc.page_info)

        else:
            pi = graphene.relay.PageInfo(has_next_page=False,
                                         has_previous_page=False)
            return SecretFileMappingConnection(edges=[], page_info=pi)
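For context, a minimal hypothetical call of this resolver, assuming kwargs carries standard Relay-style pagination arguments (first/after, last/before) that ListBasedConnection applies; labbook is a loaded LabBook as in the other examples:

    # Hypothetical usage (not from the source): kwargs is assumed to hold
    # Relay-style pagination args understood by ListBasedConnection.
    connection = helper_resolve_secrets_file_mapping(labbook, {"first": 10})
    for edge in connection.edges:
        print(edge.node.filename, edge.node.mount_path)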
Example No. 4
    def mutate_and_get_payload(cls,
                               root,
                               info,
                               owner,
                               labbook_name,
                               filename,
                               mount_path,
                               client_mutation_id=None):

        # Expand a leading '~' to the home directory used inside the project container
        if len(mount_path) > 0 and mount_path[0] == '~':
            mount_path = mount_path.replace('~', '/home/giguser')

        if not cls._is_target_valid(mount_path):
            raise ValueError(f"Mount path {mount_path} is not a valid path")

        username = get_logged_in_username()
        lb = InventoryManager().load_labbook(username,
                                             owner,
                                             labbook_name,
                                             author=get_logged_in_author())
        with lb.lock():
            secstore = SecretStore(lb, username)
            secstore[filename] = mount_path
            cls._record_insert_activity(secstore, filename, lb, mount_path)

        env = Environment(owner=owner, name=lb.name)
        return InsertSecretsEntry(environment=env)
Example No. 5
    def test_delete_secrets_file(self, fixture_working_dir_env_repo_scoped):
        client = fixture_working_dir_env_repo_scoped[2]
        im = InventoryManager(fixture_working_dir_env_repo_scoped[0])
        lb = im.create_labbook("default", "default",
                               "unittest-mutation-delete-secret")
        secstore = SecretStore(lb, "default")
        secstore['remove.key'] = '/mnt/nowhere'
        secstore['absent.key'] = '/mnt/nowhere2'

        with tempfile.TemporaryDirectory() as tdir:
            path = os.path.join(tdir, 'remove.key')
            f1 = open(path, 'w')
            f1.write('<<<keydata>>>')
            f1.close()
            secstore.insert_file(f1.name)

        query = """
        mutation delete {
            deleteSecretsFile(input: {
                owner: "default",
                labbookName: "unittest-mutation-delete-secret",
                filename: "remove.key",
            }) {
                environment {
                    secretsFileMapping {
                        edges {
                            node {
                                filename
                                mountPath
                                isPresent
                            }
                        }
                    }
                }
            }
        }"""
        r = client.execute(query)
        assert 'errors' not in r
        n = r['data']['deleteSecretsFile']['environment'][
            'secretsFileMapping']['edges']
        assert n[0]['node']['filename'] == 'absent.key'
        assert n[0]['node']['isPresent'] is False
        assert n[0]['node']['mountPath'] == '/mnt/nowhere2'

        assert n[1]['node']['filename'] == 'remove.key'
        assert n[1]['node']['isPresent'] is False
        assert n[1]['node']['mountPath'] == '/mnt/nowhere'
Example No. 6
    def mutate_and_get_payload(cls,
                               root,
                               info,
                               owner,
                               labbook_name,
                               filename,
                               client_mutation_id=None):
        username = get_logged_in_username()
        lb = InventoryManager().load_labbook(username,
                                             owner,
                                             labbook_name,
                                             author=get_logged_in_author())
        with lb.lock():
            secstore = SecretStore(lb, username)
            secstore.delete_file(filename)

        env = Environment(owner=owner, name=lb.name)
        return DeleteSecretsFile(environment=env)
Example No. 7
    def mutate_and_process_upload(cls, info, upload_file_path, upload_filename,
                                  **kwargs):
        if not upload_file_path:
            logger.error('No file uploaded')
            raise ValueError('No file uploaded')

        username = get_logged_in_username()
        owner = kwargs.get('owner')
        labbook_name = kwargs.get('labbook_name')

        lb = InventoryManager().load_labbook(username, owner, labbook_name)
        with lb.lock():
            secret_store = SecretStore(lb, username)
            inserted_path = secret_store.insert_file(
                upload_file_path, dst_filename=upload_filename)

        env = Environment(owner=owner, name=lb.name)
        return UploadSecretsFile(environment=env)
Example No. 8
    def test_with_secrets(self, build_lb_image_for_jupyterlab):
        fix = ContainerFixture(build_lb_image_for_jupyterlab)
        fix.docker_client.containers.get(fix.docker_container_id).stop()
        fix.docker_client.containers.get(fix.docker_container_id).remove()

        secstore = SecretStore(fix.labbook, fix.username)
        target_dir = '/root/.aws-sample-creds'

        secstore['private-key.key'] = target_dir
        secstore['public-key.key'] = target_dir

        with tempfile.TemporaryDirectory() as tempdir:
            p1 = open(os.path.join(tempdir, 'private-key.key'), 'wb')
            p1.write(b'AWS-mock-PRIVATE')
            p1.close()
            p2 = open(os.path.join(tempdir, 'public-key.key'), 'wb')
            p2.write(b'AWS-mock-PUBLIC')
            p2.close()

            # Add the mock AWS keys
            secstore.insert_file(p1.name)
            secstore.insert_file(p2.name)

        container_id = ContainerWorkflows.start_labbook(fix.labbook, fix.username)

        with tempfile.TemporaryDirectory() as td2:
            tfile = open(os.path.join(td2, 'sample.py'), 'w')
            tfile.write("""
import os
r = os.path.expanduser('~/.aws-sample-creds')
pri_key = open(os.path.join(r, 'private-key.key')).read(1000)
pub_key = open(os.path.join(r, 'public-key.key')).read(1000)
print(pri_key, pub_key)""")
            tfile.close()
            ContainerOperations.copy_into_container(fix.labbook, fix.username,
                                                    src_path=tfile.name,
                                                    dst_dir='/tmp/samplescript')
            r = fix.docker_client.containers.get(container_id).\
                exec_run('sh -c "python /tmp/samplescript/sample.py"')

            # Run the script to load and print out the mock "secret" keys
            assert r.output.decode().strip() == 'AWS-mock-PRIVATE AWS-mock-PUBLIC'
Example No. 9
    def start_labbook(labbook: LabBook, username: str) -> str:
        """Start the project container, then copy each registered secret file
        into its configured mount directory inside the container."""
        _, container_id = ContainerOperations.start_container(
            labbook, username)

        secret_store = SecretStore(labbook, username)

        secrets_dir_map = secret_store.as_mount_dict.items()
        for sec_local_src, sec_container_dst in secrets_dir_map:
            ContainerOperations.copy_into_container(labbook,
                                                    username,
                                                    src_path=sec_local_src,
                                                    dst_dir=sec_container_dst)

        # TODO - if putting a secret fails, then stop container and raise exception

        return container_id
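A minimal sketch (not from the source) of the mapping this loop consumes, assuming as_mount_dict maps each secret file's local vault path to its destination directory inside the container, consistent with Example No. 8 and Example No. 16:

    # Hypothetical illustration only: as_mount_dict is assumed to map a
    # secret file's path in the local vault to its destination directory
    # inside the container, e.g.
    #   {"<local vault path>/id_rsa": "/opt/secrets/location/in/container"}
    secret_store = SecretStore(labbook, username)
    for local_path, container_dir in secret_store.as_mount_dict.items():
        print(f"copy {local_path} -> {container_dir}")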
Example No. 10
    def mutate_and_get_payload(cls,
                               root,
                               info,
                               owner,
                               labbook_name,
                               filename,
                               client_mutation_id=None):
        username = get_logged_in_username()
        lb = InventoryManager().load_labbook(username,
                                             owner,
                                             labbook_name,
                                             author=get_logged_in_author())
        with lb.lock():
            secret_store = SecretStore(lb, username)
            del secret_store[filename]
            cls._record_remove_activity(secret_store, filename, lb)

        env = Environment(owner=owner, name=lb.name)
        return RemoveSecretsEntry(environment=env)
Example No. 11
    def delete_labbook(self, username: str, owner: str,
                       labbook_name: str) -> List[DatasetCleanupJob]:
        """Delete a Labbook from this Gigantum working directory.

        Args:
            username: Active username
            owner: Namespace of the Labbook
            labbook_name: Name of the Labbook

        Returns:
            List of DatasetCleanupJob entries for linked datasets whose caches should be scheduled for cleanup

        """
        lb = self.load_labbook(username, owner, labbook_name)

        # Get list of datasets and cache roots to schedule for cleanup
        datasets = self.get_linked_datasets(lb)
        datasets_to_schedule = list()
        for ds in datasets:
            try:
                m = Manifest(ds, username)
                if not ds.namespace:
                    raise ValueError(
                        "Dataset namespace required to schedule for cleanup")
                datasets_to_schedule.append(
                    DatasetCleanupJob(namespace=ds.namespace,
                                      name=ds.name,
                                      cache_root=m.cache_mgr.cache_root))
            except Exception as err:
                # Skip errors
                logger.warning(
                    f"Error occurred and ignored while processing submodules during Project delete: {err}"
                )
                continue

        # Delete all secrets pertaining to this project.
        SecretStore(lb, username).clear_files()

        # Remove labbook contents
        shutil.rmtree(lb.root_dir, ignore_errors=True)

        return datasets_to_schedule
Example No. 12
    def test_clean(self, mock_config_file):
        secstore = init(mock_config_file[0])
        mnt_target = '/opt/.ssh'
        secstore['ID_SSH.KEY'] = mnt_target
        with tempfile.TemporaryDirectory() as tempdir:
            with open(os.path.join(tempdir, 'ID_SSH.KEY'), 'w') as t1:
                t1.write('CORRECT_DATA')
            keyfile_dst_1 = secstore.insert_file(t1.name)

        del secstore['ID_SSH.KEY']
        with open(os.path.join(os.path.dirname(keyfile_dst_1), 'badfile'),
                  'w') as bf:
            bf.write('This file must get cleaned.')

        # Even though an incorrect file is in there, it should not be listed.
        assert len(secstore.list_files()) == 0

        # Creating a new SecretStore runs _clean, which should remove the
        # extraneous file.
        secstore2 = SecretStore(secstore.labbook, secstore.username)
        assert not os.path.exists(bf.name)
Example No. 13
    def test_insert_secrets_entry(self, fixture_working_dir_env_repo_scoped):
        client = fixture_working_dir_env_repo_scoped[2]
        im = InventoryManager(fixture_working_dir_env_repo_scoped[0])
        lb = im.create_labbook("default", "default",
                               "unittest-mutation-create-secret")
        query = """
        mutation insert {
            insertSecretsEntry(input: {
                owner: "default",
                labbookName: "unittest-mutation-create-secret",
                filename: "example.key",
                mountPath: "/opt/path"
            }) {
                environment {
                    secretsFileMapping {
                        edges {
                            node {
                                filename
                                mountPath
                                isPresent
                            }
                        }
                    }
                }
            }
        }"""
        r = client.execute(query)
        assert 'errors' not in r
        n = r['data']['insertSecretsEntry']['environment'][
            'secretsFileMapping']['edges'][0]['node']
        assert n['filename'] == 'example.key'
        assert n['isPresent'] is False
        assert n['mountPath'] == '/opt/path'

        # Check that secrets.json is tracked.
        secstore = SecretStore(lb, "default")
        p = call_subprocess(f"git ls-files {secstore.secret_path}".split(),
                            cwd=lb.root_dir)
        assert p.strip() == '.gigantum/secrets.json'
        assert 'Created entry for secrets file' in lb.git.log()[0]['message']
Example No. 14
def init(config):
    """Create a throwaway labbook and return a SecretStore bound to it."""
    im = InventoryManager(config)
    lb = im.create_labbook('test', 'test', 'labbook1')
    return SecretStore(lb, 'test')
Example No. 15
 def _helper_resolve_is_present(self, labbook):
     secret_store = SecretStore(labbook, get_logged_in_username())
     if self.filename in secret_store:
         return (self.filename, True) in secret_store.list_files()
     else:
         return False
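The membership check above implies that list_files() returns (filename, is_present) tuples; a minimal hypothetical way to consume the secret_store created above:

    # Hypothetical usage, assuming list_files() yields (filename, is_present)
    # tuples as the membership check implies.
    for fname, present in secret_store.list_files():
        status = "uploaded" if present else "registered but not uploaded"
        print(f"{fname}: {status}")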
Example No. 16
    def test_upload_secrets_file(self, fixture_working_dir, mock_upload_key):
        class DummyContext(object):
            def __init__(self, file_handle):
                self.labbook_loader = None
                self.files = {'uploadChunk': file_handle}

        client = Client(fixture_working_dir[3],
                        middleware=[DataloaderMiddleware()])

        im = InventoryManager(fixture_working_dir[0])
        lb = im.create_labbook("default", "default", "unittest-upload-secret")
        secret_store = SecretStore(lb, "default")
        secret_store['id_rsa'] = '/opt/secrets/location/in/container'
        initial_hash = hashlib.md5(open(mock_upload_key.name,
                                        'rb').read()).hexdigest()

        new_file_size = os.path.getsize(mock_upload_key.name)
        chunk_size = 4194000
        file_info = os.stat(mock_upload_key.name)
        file_size = int(file_info.st_size / 1000)
        total_chunks = int(math.ceil(file_info.st_size / chunk_size))

        mf = open(mock_upload_key.name)
        for chunk_index in range(total_chunks):
            chunk = io.BytesIO()
            chunk.write(mf.read(chunk_size).encode())
            chunk.seek(0)
            upload_query = f"""
            mutation upload {{
                uploadSecretsFile(input: {{
                    owner: "default",
                    labbookName: "unittest-upload-secret",
                    transactionId: "unittest-txid-9999",
                    chunkUploadParams: {{
                        uploadId: "rando-upload-id-1234",
                        chunkSize: {chunk_size},
                        totalChunks: {total_chunks},
                        chunkIndex: {chunk_index},
                        fileSize: "{file_size}",
                        filename: "{os.path.basename(mock_upload_key.name)}"
                    }}
                }}) {{
                    environment {{
                        secretsFileMapping {{
                            edges {{
                                node {{
                                    filename
                                    isPresent
                                    mountPath
                                }}
                            }}
                        }}
                    }}
                }}
            }}"""

            file = FileStorage(chunk)
            r = client.execute(upload_query, context_value=DummyContext(file))

        secret_info = r['data']['uploadSecretsFile']['environment'][
            'secretsFileMapping']['edges'][0]['node']
        assert secret_info['filename'] == 'id_rsa'
        assert secret_info['mountPath'] == '/opt/secrets/location/in/container'
        assert secret_info['isPresent'] is True

        # Test that the uploaded file hash exactly matches the one stored in the "vault"
        d = secret_store.as_mount_dict
        uploaded_hash = hashlib.md5(open(f'{list(d.keys())[0]}',
                                         'rb').read()).hexdigest()
        assert initial_hash == uploaded_hash
Example No. 17
 def _helper_resolve_mount_path(self, labbook):
     secret_store = SecretStore(labbook, get_logged_in_username())
     return secret_store[self.filename]
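Taken together, the examples above exercise a small, dict-like SecretStore interface. The sketch below is not from the source; it only summarizes the calls demonstrated above, with labbook and username standing in for the objects used in the examples:

    # Summary sketch (not from the source): SecretStore operations shown above.
    store = SecretStore(labbook, username)        # vault scoped to one project and user
    store['id_rsa'] = '/opt/secrets/location/in/container'  # register filename -> container mount path
    store.insert_file('/tmp/id_rsa')              # copy the actual key file into the local vault
    store.list_files()                            # [(filename, is_present), ...]
    store.as_mount_dict                           # {local vault path: container mount path}
    store.delete_file('id_rsa')                   # remove the stored file; the entry remains (Example No. 5)
    del store['id_rsa']                           # remove the registry entry itself (Example No. 10)
    store.clear_files()                           # remove all stored secret files (Example No. 11)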