Example #1
def mock_create_labbooks(fixture_working_dir):
    # Create a labbook in the temporary directory
    config_file = fixture_working_dir[0]
    im = InventoryManager(config_file)
    lb = im.create_labbook(UT_USERNAME,
                           UT_USERNAME,
                           UT_LBNAME,
                           description="Cats labbook 1")

    # Create a file in the dir and add it to the labbook's code section
    with open(os.path.join(fixture_working_dir[1], 'unittest-examplefile'),
              'w') as sf:
        sf.write("test data")
    FileOperations.insert_file(lb, 'code', sf.name)

    assert os.path.isfile(
        os.path.join(lb.root_dir, 'code', 'unittest-examplefile'))

    # Create test client
    schema = graphene.Schema(query=LabbookQuery, mutation=LabbookMutations)
    with patch.object(Configuration, 'find_default_config',
                      lambda self: config_file):
        app = Flask("lmsrvlabbook")
        app.config["LABMGR_CONFIG"] = Configuration()
        app.config["LABMGR_ID_MGR"] = get_identity_manager(Configuration())
        with app.app_context():
            flask.g.user_obj = app.config["LABMGR_ID_MGR"].get_user_profile()
            client = Client(
                schema,
                middleware=[DataloaderMiddleware(), error_middleware],
                context_value=ContextMock())
            yield lb, client, schema
    shutil.rmtree(fixture_working_dir[1], ignore_errors=True)
Example #2
def fixture_working_dir_lfs_disabled():
    """A pytest fixture that creates a temporary working directory, config file, schema, and local user identity
    """
    # Create temp dir
    config_file, temp_dir = _create_temp_work_dir(lfs_enabled=False)

    # Create user identity
    insert_cached_identity(temp_dir)

    # Create test client
    schema = graphene.Schema(query=LabbookQuery, mutation=LabbookMutations)

    with patch.object(Configuration, 'find_default_config',
                      lambda self: config_file):
        # Load User identity into app context
        app = Flask("lmsrvlabbook")
        app.config["LABMGR_CONFIG"] = Configuration()
        app.config["LABMGR_ID_MGR"] = get_identity_manager(Configuration())

        with app.app_context():
            # within this block, current_app points to app. Set current user explicitly (this is done in the middleware)
            flask.g.user_obj = app.config["LABMGR_ID_MGR"].get_user_profile()

            # Create a test client
            client = Client(schema,
                            middleware=[DataloaderMiddleware()],
                            context_value=ContextMock())

            yield config_file, temp_dir, client, schema  # config file path, temporary working directory, test client, schema

    # Remove the temp_dir
    shutil.rmtree(temp_dir)
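These fixtures follow pytest's yield-fixture style: everything before the yield is setup, the yielded tuple is what a test receives, and everything after the yield runs as teardown. A minimal usage sketch, assuming the fixture above is registered with @pytest.fixture (the decorator is not shown in this excerpt):

def test_schema_smoke(fixture_working_dir_lfs_disabled):
    # Unpack the tuple yielded by the fixture
    config_file, temp_dir, client, schema = fixture_working_dir_lfs_disabled

    # A standard GraphQL introspection query should resolve without errors;
    # graphene names the query type after the class, here LabbookQuery
    result = client.execute("{ __schema { queryType { name } } }")
    assert 'errors' not in result
    assert result['data']['__schema']['queryType']['name'] == 'LabbookQuery'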
Example #3
    def test_import_labbook(self, fixture_working_dir):
        """Test batch uploading, but not full import"""
        class DummyContext(object):
            def __init__(self, file_handle):
                self.labbook_loader = None
                self.files = {'uploadChunk': file_handle}

        client = Client(fixture_working_dir[3], middleware=[DataloaderMiddleware()])

        # Create a temporary labbook
        lb = InventoryManager(fixture_working_dir[0]).create_labbook("default", "default", "test-export",
                                                                     description="Tester")

        # Create a largeish file in the dir
        with open(os.path.join(fixture_working_dir[1], 'testfile.bin'), 'wb') as testfile:
            testfile.write(os.urandom(9000000))
        FileOperations.insert_file(lb, 'input', testfile.name)

        # Export labbook
        zip_file = export_labbook_as_zip(lb.root_dir, tempfile.gettempdir())
        lb_dir = lb.root_dir

        # Get upload params
        chunk_size = 4194304
        file_info = os.stat(zip_file)
        file_size = int(file_info.st_size / 1000)
        total_chunks = int(math.ceil(file_info.st_size/chunk_size))

        with open(zip_file, 'rb') as tf:
            for chunk_index in range(total_chunks):
                chunk = io.BytesIO()
                chunk.write(tf.read(chunk_size))
                chunk.seek(0)
                file = FileStorage(chunk)

                query = f"""
                            mutation myMutation{{
                              importLabbook(input:{{
                                chunkUploadParams:{{
                                  uploadId: "jfdjfdjdisdjwdoijwlkfjd",
                                  chunkSize: {chunk_size},
                                  totalChunks: {total_chunks},
                                  chunkIndex: {chunk_index},
                                  fileSize: "{file_size}",
                                  filename: "{os.path.basename(zip_file)}"
                                }}
                              }}) {{
                                importJobKey
                              }}
                            }}
                            """
                result = client.execute(query, context_value=DummyContext(file))
                assert "errors" not in result
                if chunk_index == total_chunks - 1:
                    assert isinstance(result['data']['importLabbook']['importJobKey'], str)
                    assert "rq:job:" in result['data']['importLabbook']['importJobKey']

                chunk.close()
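The chunk loop above recurs throughout these tests, so the arithmetic is worth spelling out: a file of st_size bytes needs ceil(st_size / chunk_size) chunks, and only the final chunk may be shorter than chunk_size. A hypothetical helper (not part of the source) capturing the pattern:

import io
import math
import os

def iter_chunks(path, chunk_size=4194304):
    """Yield (chunk_index, total_chunks, BytesIO) tuples covering `path`."""
    total_chunks = math.ceil(os.stat(path).st_size / chunk_size)
    with open(path, 'rb') as f:
        for chunk_index in range(total_chunks):
            yield chunk_index, total_chunks, io.BytesIO(f.read(chunk_size))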
Example #4
def fixture_single_dataset():
    """A pytest fixture that creates a temporary working directory, a config file to match, creates the schema,
    and populates the environment component repository.
    Class scope modifier attached
    """
    # Create temp dir
    config_file, temp_dir = _create_temp_work_dir()

    # Create user identity
    insert_cached_identity(temp_dir)

    # Create test client
    schema = graphene.Schema(query=LabbookQuery, mutation=LabbookMutations)

    # Create a dataset
    im = InventoryManager(config_file)

    ds = im.create_dataset('default',
                           'default',
                           "test-dataset",
                           storage_type="gigantum_object_v1",
                           description="Cats 2")
    m = Manifest(ds, 'default')
    cm_class = get_cache_manager_class(ds.client_config)
    cache_mgr = cm_class(ds, 'default')
    revision = ds.git.repo.head.commit.hexsha

    os.makedirs(os.path.join(cache_mgr.cache_root, revision, "other_dir"))
    helper_append_file(cache_mgr.cache_root, revision, "test1.txt", "asdfasdf")
    helper_append_file(cache_mgr.cache_root, revision, "test2.txt", "rtg")
    helper_append_file(cache_mgr.cache_root, revision, "test3.txt", "wer")
    helper_append_file(cache_mgr.cache_root, revision, "other_dir/test4.txt",
                       "dfasdfhfgjhg")
    helper_append_file(cache_mgr.cache_root, revision, "other_dir/test5.txt",
                       "fdghdfgsa")
    m.update()

    with patch.object(Configuration, 'find_default_config',
                      lambda self: config_file):
        # Load User identity into app context
        app = Flask("lmsrvlabbook")
        app.config["LABMGR_CONFIG"] = Configuration()
        app.config["LABMGR_ID_MGR"] = get_identity_manager(Configuration())

        with app.app_context():
            # within this block, current_app points to app. Set current user explicitly (this is done in the middleware)
            flask.g.user_obj = app.config["LABMGR_ID_MGR"].get_user_profile()

            # Create a test client
            client = Client(schema,
                            middleware=[DataloaderMiddleware()],
                            context_value=ContextMock())

            yield config_file, temp_dir, client, ds, cache_mgr

    # Remove the temp_dir
    shutil.rmtree(temp_dir)
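helper_append_file is not shown in this excerpt; judging from its call sites above, a plausible implementation (an assumption, not the source's code) would be:

import os

def helper_append_file(cache_root, revision, rel_path, content):
    # Assumed behavior: append `content` to a file under the revision directory
    with open(os.path.join(cache_root, revision, rel_path), 'at') as f:
        f.write(content)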
Example #5
def fixture_working_dir_env_repo_scoped():
    """A pytest fixture that creates a temporary working directory, a config file to match, creates the schema,
    and populates the environment component repository.
    Class scope modifier attached
    """
    # Create temp dir
    config_file, temp_dir = _create_temp_work_dir()

    # Create user identity
    insert_cached_identity(temp_dir)

    # Create test client
    schema = graphene.Schema(query=LabbookQuery, mutation=LabbookMutations)

    # get environment data and index
    erm = RepositoryManager(config_file)
    erm.update_repositories()
    erm.index_repositories()

    with patch.object(Configuration, 'find_default_config',
                      lambda self: config_file):
        # Load User identity into app context
        app = Flask("lmsrvlabbook")
        app.config["LABMGR_CONFIG"] = Configuration()
        app.config["LABMGR_ID_MGR"] = get_identity_manager(Configuration())

        with app.app_context():
            # within this block, current_app points to app. Set current user explicitly (this is done in the middleware)
            flask.g.user_obj = app.config["LABMGR_ID_MGR"].get_user_profile()

            # Create a test client
            client = Client(
                schema,
                middleware=[DataloaderMiddleware(), error_middleware],
                context_value=ContextMock())

            # config file path, temporary working directory, test client, schema
            yield config_file, temp_dir, client, schema

    # Remove the temp_dir
    shutil.rmtree(temp_dir)
Example #6
    def test_add_file(self, mock_create_labbooks):
        """Test adding a new file to a labbook"""
        class DummyContext(object):
            def __init__(self, file_handle):
                self.labbook_loader = None
                self.files = {'uploadChunk': file_handle}

        client = Client(mock_create_labbooks[3],
                        middleware=[DataloaderMiddleware()])

        # Create file to upload
        test_file = os.path.join(tempfile.gettempdir(), "myValidFile.dat")
        est_size = 9000000
        try:
            os.remove(test_file)
        except OSError:
            pass
        with open(test_file, 'wb') as tf:
            tf.write(os.urandom(est_size))

        new_file_size = os.path.getsize(tf.name)
        # Get upload params
        chunk_size = 4194000
        file_info = os.stat(test_file)
        file_size = int(file_info.st_size / 1000)
        total_chunks = int(math.ceil(file_info.st_size / chunk_size))

        target_file = os.path.join(mock_create_labbooks[1], 'default',
                                   'default', 'labbooks', 'labbook1', 'code',
                                   'newdir', "myValidFile.dat")
        lb = InventoryManager(mock_create_labbooks[0]).load_labbook(
            'default', 'default', 'labbook1')
        FileOperations.makedir(lb, 'code/newdir', create_activity_record=True)

        txid = "000-unitest-transaction"
        with open(test_file, 'rb') as tf:
            # Check for file to exist (shouldn't yet)
            assert os.path.exists(target_file) is False
            for chunk_index in range(total_chunks):
                # Upload a chunk
                chunk = io.BytesIO()
                chunk.write(tf.read(chunk_size))
                chunk.seek(0)
                file = FileStorage(chunk)

                query = f"""
                            mutation addLabbookFile{{
                              addLabbookFile(input:{{owner:"default",
                                                      labbookName: "labbook1",
                                                      section: "code",
                                                      filePath: "newdir/myValidFile.dat",
                                                      transactionId: "{txid}",
                                chunkUploadParams:{{
                                  uploadId: "fdsfdsfdsfdfs",
                                  chunkSize: {chunk_size},
                                  totalChunks: {total_chunks},
                                  chunkIndex: {chunk_index},
                                  fileSizeKb: {file_size},
                                  filename: "{os.path.basename(test_file)}"
                                }}
                              }}) {{
                                      newLabbookFileEdge {{
                                        node{{
                                          id
                                          key
                                          isDir
                                          size
                                          modifiedAt
                                        }}
                                      }}
                                    }}
                            }}
                            """
                r = client.execute(query, context_value=DummyContext(file))
        assert 'errors' not in r
        # These fields are only populated once the last chunk is uploaded; they are None otherwise.
        assert r['data']['addLabbookFile']['newLabbookFileEdge']['node'][
            'isDir'] is False
        assert r['data']['addLabbookFile']['newLabbookFileEdge']['node'][
            'key'] == 'newdir/myValidFile.dat'
        assert r['data']['addLabbookFile']['newLabbookFileEdge']['node'][
            'size'] == f"{new_file_size}"
        assert isinstance(
            r['data']['addLabbookFile']['newLabbookFileEdge']['node']
            ['modifiedAt'], float)
        assert r['data']['addLabbookFile']['newLabbookFileEdge']['node']['modifiedAt'] != \
               round(r['data']['addLabbookFile']['newLabbookFileEdge']['node']['modifiedAt'])
        # When done uploading, file should exist in the labbook
        assert os.path.exists(target_file)
        assert os.path.isfile(target_file)

        complete_query = f"""
        mutation completeQuery {{
            completeBatchUploadTransaction(input: {{
                owner: "default",
                labbookName: "labbook1",
                transactionId: "{txid}"
            }}) {{
                success
            }}
        }}
        """
        r = client.execute(complete_query, context_value=DummyContext(file))
        assert 'errors' not in r
        assert lb.is_repo_clean
        assert 'Uploaded 1 new file(s)' in lb.git.log()[0]['message']
Example #7
    def test_add_file_fail_due_to_git_ignore(self, mock_create_labbooks):
        """Test adding a new file to a labbook"""
        class DummyContext(object):
            def __init__(self, file_handle):
                self.labbook_loader = None
                self.files = {'uploadChunk': file_handle}

        client = Client(mock_create_labbooks[3],
                        middleware=[DataloaderMiddleware()])

        new_file_size = 9000000
        # Create file to upload
        test_file = os.path.join(tempfile.gettempdir(), ".DS_Store")
        with open(test_file, 'wb') as tf:
            tf.write(os.urandom(new_file_size))

        # Get upload params
        chunk_size = 4194000
        file_info = os.stat(test_file)
        file_size = int(file_info.st_size / 1000)
        total_chunks = int(math.ceil(file_info.st_size / chunk_size))

        target_file = os.path.join(mock_create_labbooks[1], 'default',
                                   'default', 'labbooks', 'labbook1', 'code',
                                   'newdir', '.DS_Store')
        try:
            os.remove(target_file)
        except OSError:
            pass
        lb = InventoryManager(mock_create_labbooks[0]).load_labbook(
            'default', 'default', 'labbook1')
        FileOperations.makedir(lb, 'code/newdir', create_activity_record=True)

        with open(test_file, 'rb') as tf:
            # Check for file to exist (shouldn't yet)
            assert os.path.exists(target_file) is False

            for chunk_index in range(total_chunks):
                # Upload a chunk
                chunk = io.BytesIO()
                chunk.write(tf.read(chunk_size))
                chunk.seek(0)
                file = FileStorage(chunk)

                query = f"""
                            mutation addLabbookFile{{
                              addLabbookFile(input:{{owner:"default",
                                                      labbookName: "labbook1",
                                                      section: "code",
                                                      filePath: "newdir/.DS_Store",
                                                      transactionId: "111-unittest-tx",
                                chunkUploadParams:{{
                                  uploadId: "jfdjfdjdisdjwdoijwlkfjd",
                                  chunkSize: {chunk_size},
                                  totalChunks: {total_chunks},
                                  chunkIndex: {chunk_index},
                                  fileSizeKb: {file_size},
                                  filename: "{os.path.basename(test_file)}"
                                }}
                              }}) {{
                                      newLabbookFileEdge {{
                                        node{{
                                          id
                                          key
                                          isDir
                                          size
                                        }}
                                      }}
                                    }}
                            }}
                            """
                r = client.execute(query, context_value=DummyContext(file))

            # This must be outside of the chunk upload loop
            pprint.pprint(r)
            assert 'errors' in r
            assert len(r['errors']) == 1
            assert 'matches ignored pattern' in r['errors'][0]['message']

        # When done uploading, file should exist in the labbook
        assert os.path.isfile(target_file) is False
        assert os.path.exists(target_file) is False
Example #8
    def test_upload_secrets_file(self, fixture_working_dir, mock_upload_key):
        class DummyContext(object):
            def __init__(self, file_handle):
                self.labbook_loader = None
                self.files = {'uploadChunk': file_handle}

        client = Client(fixture_working_dir[3],
                        middleware=[DataloaderMiddleware()])

        im = InventoryManager(fixture_working_dir[0])
        lb = im.create_labbook("default", "default", "unittest-upload-secret")
        secret_store = SecretStore(lb, "default")
        secret_store['id_rsa'] = '/opt/secrets/location/in/container'
        with open(mock_upload_key.name, 'rb') as key_file:
            initial_hash = hashlib.md5(key_file.read()).hexdigest()

        new_file_size = os.path.getsize(mock_upload_key.name)
        chunk_size = 4194000
        file_info = os.stat(mock_upload_key.name)
        file_size = int(file_info.st_size / 1000)
        total_chunks = int(math.ceil(file_info.st_size / chunk_size))

        mf = open(mock_upload_key.name)
        for chunk_index in range(total_chunks):
            chunk = io.BytesIO()
            chunk.write(mf.read(chunk_size).encode())
            chunk.seek(0)
            upload_query = f"""
            mutation upload {{
                uploadSecretsFile(input: {{
                    owner: "default",
                    labbookName: "unittest-upload-secret",
                    transactionId: "unittest-txid-9999",
                    chunkUploadParams: {{
                        uploadId: "rando-upload-id-1234",
                        chunkSize: {chunk_size},
                        totalChunks: {total_chunks},
                        chunkIndex: {chunk_index},
                        fileSize: "{file_size}",
                        filename: "{os.path.basename(mock_upload_key.name)}"
                    }}
                }}) {{
                    environment {{
                        secretsFileMapping {{
                            edges {{
                                node {{
                                    filename
                                    isPresent
                                    mountPath
                                }}
                            }}
                        }}
                    }}
                }}
            }}"""

            file = FileStorage(chunk)
            r = client.execute(upload_query, context_value=DummyContext(file))
        mf.close()

        secret_info = r['data']['uploadSecretsFile']['environment'][
            'secretsFileMapping']['edges'][0]['node']
        assert secret_info['filename'] == 'id_rsa'
        assert secret_info['mountPath'] == '/opt/secrets/location/in/container'
        assert secret_info['isPresent'] is True

        # Test that the uploaded file hash exactly matches that of the copy in the "vault"
        d = secret_store.as_mount_dict
        with open(list(d.keys())[0], 'rb') as vault_file:
            uploaded_hash = hashlib.md5(vault_file.read()).hexdigest()
        assert initial_hash == uploaded_hash
Example #9
    def test_add_file(self, mock_create_dataset):
        """Test adding a new file to a labbook"""
        class DummyContext(object):
            def __init__(self, file_handle):
                self.dataset_loader = None
                self.files = {'uploadChunk': file_handle}

        client = Client(mock_create_dataset[3],
                        middleware=[DataloaderMiddleware()])

        # Create file to upload
        test_file = os.path.join(tempfile.gettempdir(), "myValidFile.dat")
        est_size = 9000000
        try:
            os.remove(test_file)
        except OSError:
            pass
        with open(test_file, 'wb') as tf:
            tf.write(os.urandom(est_size))

        new_file_size = os.path.getsize(tf.name)
        # Get upload params
        chunk_size = 4194000
        file_info = os.stat(test_file)
        file_size = int(file_info.st_size / 1000)
        total_chunks = int(math.ceil(file_info.st_size / chunk_size))

        ds = InventoryManager(mock_create_dataset[0]).load_dataset(
            'default', 'default', 'dataset1')

        fsc = HostFilesystemCache(ds, 'default')
        target_file = os.path.join(fsc.current_revision_dir, "myValidFile.dat")

        txid = "000-unitest-transaction"
        with open(test_file, 'rb') as tf:
            # Check for file to exist (shouldn't yet)
            assert os.path.exists(target_file) is False
            for chunk_index in range(total_chunks):
                # Upload a chunk
                chunk = io.BytesIO()
                chunk.write(tf.read(chunk_size))
                chunk.seek(0)
                file = FileStorage(chunk)

                query = f"""
                            mutation addDatasetFile{{
                              addDatasetFile(input:{{owner:"default",
                                                      datasetName: "dataset1",
                                                      filePath: "myValidFile.dat",
                                                      transactionId: "{txid}",
                                chunkUploadParams:{{
                                  uploadId: "fdsfdsfdsfdfs",
                                  chunkSize: {chunk_size},
                                  totalChunks: {total_chunks},
                                  chunkIndex: {chunk_index},
                                  fileSizeKb: {file_size},
                                  filename: "{os.path.basename(test_file)}"
                                }}
                              }}) {{
                                      newDatasetFileEdge {{
                                        node{{
                                          id
                                          key
                                          isDir
                                          size
                                        }}
                                      }}
                                    }}
                            }}
                            """
                r = client.execute(query, context_value=DummyContext(file))
        assert 'errors' not in r

        # These fields are only populated once the last chunk is uploaded; they are None otherwise.
        assert r['data']['addDatasetFile']['newDatasetFileEdge']['node'][
            'isDir'] is False
        assert r['data']['addDatasetFile']['newDatasetFileEdge']['node'][
            'key'] == 'myValidFile.dat'
        assert r['data']['addDatasetFile']['newDatasetFileEdge']['node'][
            'size'] == f"{new_file_size}"
        # When done uploading, file should exist in the dataset
        assert os.path.exists(target_file)
        assert os.path.isfile(target_file)

        complete_query = f"""
        mutation completeQuery {{
            completeDatasetUploadTransaction(input: {{
                owner: "default",
                datasetName: "dataset1",
                transactionId: "{txid}"
            }}) {{
                success
            }}
        }}
        """
        r = client.execute(complete_query, context_value=DummyContext(file))
        assert 'errors' not in r

        m = Manifest(ds, 'default')
        status = m.status()
        assert len(status.created) == 0
        assert len(status.modified) == 0
        assert len(status.deleted) == 0

        assert 'Uploaded 1 new file(s)' in ds.git.log()[0]['message']
Example #10
    def test_write_chunks_out_of_order(self, mock_create_labbooks):
        """Test that chunks uploaded out of order are still assembled correctly"""
        class DummyContext(object):
            def __init__(self, file_handle):
                self.labbook_loader = None
                self.files = {'uploadChunk': file_handle}

        client = Client(mock_create_labbooks[3],
                        middleware=[DataloaderMiddleware()])

        # Create file to upload
        test_file = os.path.join(tempfile.gettempdir(), "myValidFile.dat")
        est_size = 9826421
        try:
            os.remove(test_file)
        except OSError:
            pass
        with open(test_file, 'wb') as tf:
            tf.write(os.urandom(est_size))

        new_file_size = os.path.getsize(tf.name)
        # Get upload params
        chunk_size = 419400
        file_info = os.stat(test_file)
        file_size = file_info.st_size
        total_chunks = int(math.ceil(file_info.st_size / chunk_size))

        target_file = os.path.join(mock_create_labbooks[1], 'default',
                                   'default', 'labbooks', 'labbook1', 'code',
                                   "myValidFile.dat")

        txid = "000-unitest-transaction"

        chunks = list()
        with open(test_file, 'rb') as tf:
            for chunk_index in range(total_chunks):
                chunk = io.BytesIO()
                chunk.write(tf.read(chunk_size))
                chunk.seek(0)
                chunks.append((chunk_index, chunk))

        # Shuffle all but the final chunk; the last chunk is kept last so the
        # server only assembles the file once every chunk has arrived
        last_chunk = chunks.pop()
        random.shuffle(chunks)
        chunks.append(last_chunk)

        # Check for file to exist (shouldn't yet)
        assert os.path.exists(target_file) is False
        for chunk in chunks:
            # Upload a chunk
            file = FileStorage(chunk[1])

            query = f"""
            mutation addLabbookFile {{
                addLabbookFile(input: {{
                    owner:"default",
                    labbookName: "labbook1",
                    section: "code",
                    filePath: "myValidFile.dat",
                    transactionId: "{txid}",
                    chunkUploadParams: {{
                        uploadId: "fdsfdsfdsfdfs",
                        chunkSize: {chunk_size},
                        totalChunks: {total_chunks},
                        chunkIndex: {chunk[0]},
                        fileSize: "{file_size}",
                        filename: "{os.path.basename(test_file)}"
                    }}
                }}) {{
                    newLabbookFileEdge {{
                        node {{
                            id
                            key
                            isDir
                            size
                            modifiedAt
                        }}
                    }}
                }}
            }}
            """
            r = client.execute(query, context_value=DummyContext(file))
            assert 'errors' not in r

        # These fields are only populated once the last chunk is uploaded; they are None otherwise.
        assert r['data']['addLabbookFile']['newLabbookFileEdge']['node'][
            'isDir'] is False
        assert r['data']['addLabbookFile']['newLabbookFileEdge']['node'][
            'key'] == 'myValidFile.dat'
        assert r['data']['addLabbookFile']['newLabbookFileEdge']['node'][
            'size'] == f"{new_file_size}"
        assert isinstance(
            r['data']['addLabbookFile']['newLabbookFileEdge']['node']
            ['modifiedAt'], float)
        # When done uploading, file should exist in the labbook
        assert os.path.exists(target_file)
        assert os.path.isfile(target_file)

        complete_query = f"""
        mutation completeQuery {{
            completeBatchUploadTransaction(input: {{
                owner: "default",
                labbookName: "labbook1",
                transactionId: "{txid}"
            }}) {{
                success
            }}
        }}
        """
        r = client.execute(complete_query, context_value=DummyContext(file))
        assert 'errors' not in r

        lb = InventoryManager(mock_create_labbooks[0]).load_labbook(
            'default', 'default', 'labbook1')

        with open(test_file, 'rb') as tf:
            with open(os.path.join(lb.root_dir, 'code', 'myValidFile.dat'),
                      'rb') as nf:
                assert tf.read() == nf.read()
Example #11
    def test_add_file(self, mock_create_dataset):
        """Test adding a new file to a dataset"""
        class DummyContext(object):
            def __init__(self, file_handle):
                self.dataset_loader = None
                self.labbook_loader = None
                self.files = {'uploadChunk': file_handle}

        def dispatcher_mock(self, function_ref, kwargs, metadata):
            assert kwargs['logged_in_username'] == 'default'
            assert kwargs['logged_in_email'] == '*****@*****.**'
            assert kwargs['dataset_owner'] == 'default'
            assert kwargs['dataset_name'] == 'dataset1'

            # Inject mocked config file
            kwargs['config_file'] = mock_create_dataset[0]

            # Stop patching so job gets scheduled for real
            dispatcher_patch.stop()

            # Call same method as in mutation
            d = Dispatcher()
            kwargs['dispatcher'] = Dispatcher
            res = d.dispatch_task(gtmcore.dispatcher.dataset_jobs.
                                  complete_dataset_upload_transaction,
                                  kwargs=kwargs,
                                  metadata=metadata)

            return res

        client = Client(mock_create_dataset[3],
                        middleware=[DataloaderMiddleware()])

        # Create file to upload
        test_file = os.path.join(tempfile.gettempdir(), "myValidFile.dat")
        est_size = 9000000
        try:
            os.remove(test_file)
        except OSError:
            pass
        with open(test_file, 'wb') as tf:
            tf.write(os.urandom(est_size))

        new_file_size = os.path.getsize(tf.name)
        # Get upload params
        chunk_size = 4194000
        file_info = os.stat(test_file)
        file_size = int(file_info.st_size / 1000)
        total_chunks = int(math.ceil(file_info.st_size / chunk_size))

        ds = InventoryManager(mock_create_dataset[0]).load_dataset(
            'default', 'default', 'dataset1')

        fsc = HostFilesystemCache(ds, 'default')
        target_file = os.path.join(fsc.current_revision_dir, "myValidFile.dat")

        txid = "000-unitest-transaction"
        with open(test_file, 'rb') as tf:
            # Check for file to exist (shouldn't yet)
            assert os.path.exists(target_file) is False
            for chunk_index in range(total_chunks):
                # Upload a chunk
                chunk = io.BytesIO()
                chunk.write(tf.read(chunk_size))
                chunk.seek(0)
                file = FileStorage(chunk)

                query = f"""
                            mutation addDatasetFile{{
                              addDatasetFile(input:{{owner:"default",
                                                      datasetName: "dataset1",
                                                      filePath: "myValidFile.dat",
                                                      transactionId: "{txid}",
                                chunkUploadParams:{{
                                  uploadId: "fdsfdsfdsfdfs",
                                  chunkSize: {chunk_size},
                                  totalChunks: {total_chunks},
                                  chunkIndex: {chunk_index},
                                  fileSize: "{file_size}",
                                  filename: "{os.path.basename(test_file)}"
                                }}
                              }}) {{
                                      newDatasetFileEdge {{
                                        node{{
                                          id
                                          key
                                          isDir
                                          size
                                        }}
                                      }}
                                    }}
                            }}
                            """
                r = client.execute(query, context_value=DummyContext(file))
                assert 'errors' not in r

        # These fields are only populated once the last chunk is uploaded; they are None otherwise.
        assert r['data']['addDatasetFile']['newDatasetFileEdge']['node'][
            'isDir'] is False
        assert r['data']['addDatasetFile']['newDatasetFileEdge']['node'][
            'key'] == 'myValidFile.dat'
        assert r['data']['addDatasetFile']['newDatasetFileEdge']['node'][
            'size'] == f"{new_file_size}"
        # When done uploading, file should exist in the dataset
        assert os.path.exists(target_file)
        assert os.path.isfile(target_file)

        complete_query = f"""
        mutation completeQuery {{
            completeDatasetUploadTransaction(input: {{
                owner: "default",
                datasetName: "dataset1",
                transactionId: "{txid}"
            }}) {{
                backgroundJobKey
            }}
        }}
        """

        # Patch dispatch_task so you can inject the mocked config file
        dispatcher_patch = patch.object(Dispatcher, 'dispatch_task',
                                        dispatcher_mock)
        dispatcher_patch.start()

        r = client.execute(complete_query, context_value=DummyContext(None))
        assert 'errors' not in r

        job_query = f"""
                       {{
                           jobStatus(jobId: "{r['data']['completeDatasetUploadTransaction']['backgroundJobKey']}")
                            {{                                
                                status
                                result
                                status
                                jobMetadata
                                failureMessage
                                startedAt
                                finishedAt
                            }}
                       }}
                       """

        cnt = 0
        while cnt < 20:
            job_result = client.execute(job_query,
                                        context_value=DummyContext(None))
            assert 'errors' not in job_result
            if job_result['data']['jobStatus']['status'] == 'finished':
                break
            time.sleep(.25)
            cnt += 1

        assert cnt < 20, "background job did not finish in time"
        metadata = json.loads(job_result['data']['jobStatus']['jobMetadata'])
        assert metadata['percent_complete'] == 100
        assert metadata[
            'feedback'] == 'Please wait while file contents are analyzed. 9 MB of 9 MB complete...'

        # Verify file was added and repo is clean
        m = Manifest(ds, 'default')
        status = m.status()
        assert len(status.created) == 0
        assert len(status.modified) == 0
        assert len(status.deleted) == 0

        assert 'Uploaded 1 new file(s)' in ds.git.log()[0]['message']
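The bounded polling loop above is a recurring pattern; a hypothetical helper (an assumption, not part of the source) that encapsulates it:

import time

def wait_for_job_finished(client, job_query, context, attempts=20, delay=0.25):
    """Poll a jobStatus query until it reports 'finished' or attempts run out."""
    for _ in range(attempts):
        result = client.execute(job_query, context_value=context)
        assert 'errors' not in result
        if result['data']['jobStatus']['status'] == 'finished':
            return result
        time.sleep(delay)
    raise TimeoutError("background job did not finish in time")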
Example #12
def build_image_for_jupyterlab():
    # Create temp dir
    config_file, temp_dir = _create_temp_work_dir()

    # Create user identity
    insert_cached_identity(temp_dir)

    # Create test client
    schema = graphene.Schema(query=LabbookQuery, mutation=LabbookMutations)

    # get environment data and index
    erm = RepositoryManager(config_file)
    erm.update_repositories()
    erm.index_repositories()

    with patch.object(Configuration, 'find_default_config',
                      lambda self: config_file):
        # Load User identity into app context
        app = Flask("lmsrvlabbook")
        app.config["LABMGR_CONFIG"] = Configuration()
        app.config["LABMGR_ID_MGR"] = get_identity_manager(Configuration())

        with app.app_context():
            # within this block, current_app points to app. Set current user explicitly (this is done in the middleware)
            flask.g.user_obj = app.config["LABMGR_ID_MGR"].get_user_profile()

            # Create a test client
            client = Client(
                schema,
                middleware=[DataloaderMiddleware(), error_middleware],
                context_value=ContextMock())

            # Create a labbook
            im = InventoryManager(config_file)
            lb = im.create_labbook('default',
                                   'unittester',
                                   "containerunittestbook",
                                   description="Testing docker building.")
            cm = ComponentManager(lb)
            cm.add_base(ENV_UNIT_TEST_REPO, ENV_UNIT_TEST_BASE,
                        ENV_UNIT_TEST_REV)
            cm.add_packages("pip3", [{
                "manager": "pip3",
                "package": "requests",
                "version": "2.18.4"
            }])

            bam = BundledAppManager(lb)
            bam.add_bundled_app(9999, 'share', 'A bundled app for testing',
                                "cd /mnt; python3 -m http.server 9999")

            ib = ImageBuilder(lb)
            ib.assemble_dockerfile(write=True)
            docker_client = get_docker_client()

            try:
                lb, docker_image_id = ContainerOperations.build_image(
                    labbook=lb, username="******")

                # Note: The final field is the owner
                yield lb, ib, docker_client, docker_image_id, client, "unittester"

            finally:
                try:
                    docker_client.containers.get(docker_image_id).stop()
                    docker_client.containers.get(docker_image_id).remove()
                except Exception:
                    pass

                try:
                    docker_client.images.remove(docker_image_id,
                                                force=True,
                                                noprune=False)
                except Exception:
                    pass

                shutil.rmtree(lb.root_dir)
Example #13
def fixture_working_dir_dataset_populated_scoped():
    """A pytest fixture that creates a temporary working directory, a config file to match, creates the schema,
    and populates the environment component repository.
    Class scope modifier attached
    """
    # Create temp dir
    config_file, temp_dir = _create_temp_work_dir()

    # Create user identity
    insert_cached_identity(temp_dir)

    # Create test client
    schema = graphene.Schema(query=LabbookQuery, mutation=LabbookMutations)

    # Create a set of datasets
    im = InventoryManager(config_file)

    # Create datasets 2 through 9, sleeping between each so creation
    # timestamps are distinct
    for num in range(2, 10):
        im.create_dataset('default',
                          'default',
                          f"dataset{num}",
                          storage_type="gigantum_object_v1",
                          description=f"Cats {num}")
        time.sleep(1.1)

    im.create_dataset('default',
                      'test3',
                      "dataset-other",
                      storage_type="gigantum_object_v1",
                      description="Cats other")
    time.sleep(1.1)

    im.create_labbook('test3',
                      'test3',
                      "labbook-0",
                      description="This should not show up.")

    im.create_dataset('default',
                      'default',
                      "dataset1",
                      storage_type="gigantum_object_v1",
                      description="Cats 1")
    time.sleep(1.1)

    with patch.object(Configuration, 'find_default_config',
                      lambda self: config_file):
        # Load User identity into app context
        app = Flask("lmsrvlabbook")
        app.config["LABMGR_CONFIG"] = Configuration()
        app.config["LABMGR_ID_MGR"] = get_identity_manager(Configuration())

        with app.app_context():
            # within this block, current_app points to app. Set current user explicitly (this is done in the middleware)
            flask.g.user_obj = app.config["LABMGR_ID_MGR"].get_user_profile()

            # Create a test client
            client = Client(schema,
                            middleware=[DataloaderMiddleware()],
                            context_value=ContextMock())

            yield config_file, temp_dir, client, schema

    # Remove the temp_dir
    shutil.rmtree(temp_dir)
Example #14
# Create Blueprint
complete_labbook_service = Blueprint('complete_labbook_service', __name__)

# Create Schema
full_schema = graphene.Schema(query=LabbookQuery, mutation=LabbookMutations)

# Add route and require authentication
complete_labbook_service.add_url_rule(
    f'{config.config["proxy"]["labmanager_api_prefix"]}/labbook/',
    view_func=GraphQLView.as_view(
        'graphql',
        schema=full_schema,
        graphiql=config.config["flask"]["DEBUG"],
        middleware=[
            error_middleware,
            #time_all_resolvers_middleware,
            AuthorizationMiddleware(),
            DataloaderMiddleware()
        ]),
    methods=['GET', 'POST', 'OPTIONS'])

if __name__ == '__main__':
    # If the blueprint file is executed directly, generate a schema file
    introspection_dict = full_schema.introspect()

    # Save the schema
    with open('full_schema.json', 'wt') as fp:
        json.dump(introspection_dict, fp)
        print("Wrote full schema to {}".format(os.path.realpath(fp.name)))