Code example #1
    def test_delete_dataset_files_errors(self, fixture_working_dir, snapshot):
        im = InventoryManager(fixture_working_dir[0])
        ds = im.create_dataset('default',
                               'default',
                               "dataset-delete-2",
                               storage_type="gigantum_object_v1",
                               description="testing delete")
        m = Manifest(ds, 'default')

        helper_append_file(m.cache_mgr.cache_root, m.dataset_revision,
                           "test1.txt", "asdfadfsdf")
        helper_append_file(m.cache_mgr.cache_root, m.dataset_revision,
                           "test2.txt", "fdsfgfd")
        m.sweep_all_changes()

        revision = m.dataset_revision
        assert os.path.exists(
            os.path.join(m.cache_mgr.cache_root, revision,
                         "test1.txt")) is True
        assert os.path.exists(
            os.path.join(m.cache_mgr.cache_root, revision,
                         "test2.txt")) is True

        query = """
                   mutation myMutation {
                     deleteDatasetFiles(input: {datasetOwner: "default", datasetName: "dataset-delete-2", 
                                                keys: ["testdfdfdfdf.txt"]}) {
                         success
                     }
                   }
                   """
        result = fixture_working_dir[2].execute(query)
        assert 'errors' in result
Code example #2
    def test_empty_package_counts(self, fixture_working_dir_env_repo_scoped,
                                  snapshot):
        """Test getting the a LabBook's package manager dependencies"""
        # Create labbook
        im = InventoryManager(fixture_working_dir_env_repo_scoped[0])
        lb = im.create_labbook("default",
                               "default",
                               "labbook4",
                               description="my first labbook10000")

        query = """
                    {
                      labbook(owner: "default", name: "labbook4") {
                        overview {
                          numAptPackages
                          numConda2Packages
                          numConda3Packages
                          numPipPackages
                          numCustomDependencies
                        }
                      }
                    }
                    """
        snapshot.assert_match(
            fixture_working_dir_env_repo_scoped[2].execute(query))
Code example #3
    def mutate_and_get_payload(cls,
                               root,
                               info,
                               owner,
                               labbook_name,
                               client_mutation_id=None):
        username = get_logged_in_username()
        lb = InventoryManager().load_labbook(username,
                                             owner,
                                             labbook_name,
                                             author=get_logged_in_author())

        migrated = False
        with lb.lock():
            t0 = time.time()
            workflow = LabbookWorkflow(lb)
            migrated = workflow.migrate()
            tf = time.time()

        if migrated:
            logger.info(f"Migrated {str(lb)} in {tf-t0:.2}sec")
        else:
            logger.info(f"No migration needed for {str(lb)}")

        return MigrateLabbookSchema(
            Labbook(id=f"{owner}&{labbook_name}",
                    name=labbook_name,
                    owner=owner))
Code example #4
    def test_update_unmanaged_dataset_local_errors(
            self, fixture_working_dir_dataset_tests):
        im = InventoryManager(fixture_working_dir_dataset_tests[0])
        ds = im.create_dataset('default',
                               'default',
                               "adataset",
                               storage_type="local_filesystem",
                               description="100")
        dataset_dir = ds.root_dir
        assert os.path.exists(dataset_dir) is True
        flask.g.access_token = "asdf"
        flask.g.id_token = "1234"

        query = """
                    mutation myMutation{
                      updateUnmanagedDataset(input: {datasetOwner: "default", datasetName: "adataset"}) {
                          backgroundJobKey
                      }
                    }
                """
        result = fixture_working_dir_dataset_tests[2].execute(query)
        assert "errors" in result

        # Errors because the storage backend is not yet configured
        query = """
                    mutation myMutation{
                      updateUnmanagedDataset(input: {datasetOwner: "default", datasetName: "adataset",
                       fromLocal: true}) {
                          backgroundJobKey
                      }
                    }
                """
        result = fixture_working_dir_dataset_tests[2].execute(query)
        assert "errors" in result
Code example #5
    def migrate(self) -> bool:
        """ Migrate the given LabBook to the most recent schema AND branch version.

        Returns:
            Boolean indicating whether a migration was performed (False if already up-to-date)
        """

        if self.repository.schema == CURRENT_LABBOOK_SCHEMA:
            logger.info(f"{str(self.labbook)} already migrated.")
            return False

        if 'gm.workspace' not in BranchManager(self.labbook).active_branch:
            raise GitWorkflowException(
                'Must be on a gm.workspace branch to migrate')

        im = InventoryManager(self.labbook.client_config.config_file)
        gitworkflows_utils.migrate_labbook_branches(self.labbook)
        self.repository = im.load_labbook_from_directory(self.labbook.root_dir)

        gitworkflows_utils.migrate_labbook_schema(self.labbook)
        self.repository = im.load_labbook_from_directory(self.labbook.root_dir)

        gitworkflows_utils.migrate_labbook_untracked_space(self.labbook)
        self.repository = im.load_labbook_from_directory(self.labbook.root_dir)

        # Pushes up the new master branch
        if self.repository.has_remote:
            self.sync(username='')

        return True
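
A minimal usage sketch for the workflow above, assuming the gtmcore import paths used by gigantum-client; the owner and labbook names are hypothetical:

    from gtmcore.inventory.inventory import InventoryManager
    from gtmcore.workflows import LabbookWorkflow

    # Load an existing labbook and migrate it only if needed (hypothetical names)
    lb = InventoryManager().load_labbook('default', 'default', 'my-labbook')
    workflow = LabbookWorkflow(lb)
    if workflow.migrate():
        print("Migrated to the current schema")
    else:
        print("Already up to date")
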
Code example #6
File: container.py Project: jjwatts/gigantum-client
def build_lb_image_for_env_conda(mock_config_with_repo):
    """A fixture that installs an old version of matplotlib and latest version of requests to increase code coverage"""
    im = InventoryManager(mock_config_with_repo[0])
    lb = im.create_labbook('unittester', 'unittester', "containerunittestbookenvconda",
                           description="Testing environment functions.")
    cm = ComponentManager(lb)
    cm.add_base(ENV_UNIT_TEST_REPO, ENV_UNIT_TEST_BASE, ENV_UNIT_TEST_REV)
    cm.add_packages('conda3', [{'package': 'python-coveralls', 'version': '2.7.0'}])

    ib = ImageBuilder(lb)
    ib.assemble_dockerfile(write=True)
    client = get_docker_client()
    client.containers.prune()

    try:
        lb, docker_image_id = ContainerOperations.build_image(labbook=lb, username="******")

        yield lb, 'unittester'

    finally:
        shutil.rmtree(lb.root_dir)
        try:
            client.images.remove(docker_image_id, force=True, noprune=False)
        except Exception:
            pass
Code example #7
File: labbook.py Project: Pandulis/gigantum-client
    def mutate_and_get_payload(cls,
                               root,
                               info,
                               owner,
                               labbook_name,
                               confirm,
                               client_mutation_id=None):
        username = get_logged_in_username()
        lb = InventoryManager().load_labbook(username,
                                             owner,
                                             labbook_name,
                                             author=get_logged_in_author())
        if confirm:
            logger.info(f"Deleting {str(lb)}...")
            try:
                lb, stopped = ContainerOperations.stop_container(
                    labbook=lb, username=username)
            except OSError as e:
                logger.warning(e)

            lb, docker_removed = ContainerOperations.delete_image(
                labbook=lb, username=username)
            if not docker_removed:
                raise ValueError(
                    f'Cannot delete docker image for {str(lb)} - unable to delete Project from disk'
                )

            datasets_to_schedule = InventoryManager().delete_labbook(
                username, owner, labbook_name)

            # Schedule jobs to clean the file cache for any linked datasets (if no other references exist)
            for cleanup_job in datasets_to_schedule:
                # Schedule Job to clear file cache if dataset is no longer in use
                job_metadata = {'method': 'clean_dataset_file_cache'}
                job_kwargs = {
                    'logged_in_username': username,
                    'dataset_owner': cleanup_job.namespace,
                    'dataset_name': cleanup_job.name,
                    'cache_location': cleanup_job.cache_root
                }
                dispatcher = Dispatcher()
                job_key = dispatcher.dispatch_task(
                    jobs.clean_dataset_file_cache,
                    metadata=job_metadata,
                    kwargs=job_kwargs)
                logger.info(
                    f"Dispatched clean_dataset_file_cache({ cleanup_job.namespace}/{cleanup_job.name})"
                    f" to Job {job_key}")

            # Verify Delete worked
            if os.path.exists(lb.root_dir):
                logger.error(
                    f'Deleted {str(lb)} but root directory {lb.root_dir} still exists!'
                )
                return DeleteLabbook(success=False)
            else:
                return DeleteLabbook(success=True)
        else:
            logger.info(f"Dry run in deleting {str(lb)} -- not deleted.")
            return DeleteLabbook(success=False)
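
A hedged sketch of invoking this mutation through a GraphQL test client like the fixture_working_dir clients used elsewhere in this section; the deleteLabbook field name and camelCase input keys are assumed from graphene-relay naming conventions rather than confirmed against the schema:

    query = """
            mutation myMutation {
              deleteLabbook(input: {owner: "default", labbookName: "labbook-to-remove",
                                    confirm: true}) {
                  success
              }
            }
            """
    result = client.execute(query)
    assert 'errors' not in result
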
Code example #8
    def mutate_and_get_payload(cls,
                               root,
                               info,
                               owner,
                               dataset_name,
                               description,
                               client_mutation_id=None):
        username = get_logged_in_username()
        ds = InventoryManager().load_dataset(username,
                                             owner,
                                             dataset_name,
                                             author=get_logged_in_author())
        ds.description = description
        with ds.lock():
            ds.git.add(os.path.join(ds.root_dir, '.gigantum', 'gigantum.yaml'))
            commit = ds.git.commit('Updating description')

            adr = ActivityDetailRecord(ActivityDetailType.LABBOOK, show=False)
            adr.add_value('text/plain',
                          f"Updated Dataset description: {description}")
            ar = ActivityRecord(ActivityType.LABBOOK,
                                message="Updated Dataset description",
                                linked_commit=commit.hexsha,
                                tags=["dataset"],
                                show=False)
            ar.add_detail_object(adr)
            ars = ActivityStore(ds)
            ars.create_activity_record(ar)
        return SetDatasetDescription(
            updated_dataset=Dataset(owner=owner, name=dataset_name))
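
A hedged invocation sketch for this mutation, mirroring the test queries elsewhere in this section; the setDatasetDescription field name and input keys are assumed from graphene naming conventions:

    query = """
            mutation myMutation {
              setDatasetDescription(input: {owner: "default", datasetName: "my-dataset",
                                            description: "An updated description"}) {
                  updatedDataset {
                      name
                      description
                  }
              }
            }
            """
    result = client.execute(query)
    assert 'errors' not in result
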
Code example #9
def savehook(username, owner, labbook_name):
    try:
        changed_file = request.args.get('file')
        jupyter_token = request.args.get('jupyter_token')
        logger.debug(f"Received save hook for {changed_file} in {username}/{owner}/{labbook_name}")

        redis_conn = redis.Redis(db=1)
        lb_jupyter_token_key = '-'.join(['gmlb', username, owner, labbook_name, 'jupyter-token'])
        lb_active_key = f"{'|'.join([username, owner, labbook_name])}&is-busy*"

        r = redis_conn.get(lb_jupyter_token_key.encode())
        if r is None:
            logger.error(f"Could not find jupyter token for {username}/{owner}/{labbook_name}")
            abort(400)

        if r.decode() != jupyter_token:
            raise ValueError("Incoming jupyter token must be valid")

        if len(redis_conn.keys(lb_active_key.encode())) > 0:
            # A kernel in this project is still active. Don't save auto-commit because it can blow up the
            # repository size depending on what the user is doing
            logger.info(f"Skipping jupyter savehook for {username}/{owner}/{labbook_name} due to active kernel")
            return 'success'

        lb = InventoryManager().load_labbook(username, owner, labbook_name,
                                             author=get_logged_in_author())
        with lb.lock():
            lb.sweep_uncommitted_changes()

        logger.info(f"Jupyter save hook saved {changed_file} from {str(lb)}")
        return 'success'
    except Exception as err:
        logger.error(err)
        return abort(400)
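
Because the hook validates the incoming jupyter token against redis before saving, a caller (or test) must first seed the key the hook reads. A minimal sketch, reusing the key scheme from the function above with hypothetical names:

    import redis

    # The save hook reads db 1 using the 'gmlb-...-jupyter-token' key built above
    redis_conn = redis.Redis(db=1)
    token_key = '-'.join(['gmlb', 'default', 'default', 'my-labbook', 'jupyter-token'])
    redis_conn.set(token_key, 'some-jupyter-token')
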
Code example #10
    def test_package_query_with_errors_conda(
            self, snapshot, fixture_working_dir_env_repo_scoped):
        """Test querying for package info"""
        # Create labbook
        im = InventoryManager(fixture_working_dir_env_repo_scoped[0])
        lb = im.create_labbook("default",
                               "default",
                               "labbook5conda",
                               description="my first labbook10000")

        query = """
                    {
                      labbook(owner: "default", name: "labbook5conda"){
                        id
                        checkPackages(packageInput: [
                          {manager: "conda3", package: "cdutil", version:"8.1"},
                          {manager: "conda3", package: "nltk", version:"100.00"},
                          {manager: "conda3", package: "python-coveralls", version:""},
                          {manager: "conda3", package: "thisshouldtotallyfail", version:"1.0"},
                          {manager: "conda3", package: "notarealpackage", version:""}]){
                          id
                          manager 
                          package
                          version
                          latestVersion
                          description
                          isValid     
                        }
                      }
                    }
                """

        snapshot.assert_match(
            fixture_working_dir_env_repo_scoped[2].execute(query))
Code example #11
    def test_package_query(self, snapshot,
                           fixture_working_dir_env_repo_scoped):
        """Test querying for package info"""
        im = InventoryManager(fixture_working_dir_env_repo_scoped[0])
        lb = im.create_labbook("default",
                               "default",
                               "labbook6",
                               description="my first labbook10000")

        query = """
                    {
                      labbook(owner: "default", name: "labbook6"){
                        id
                        checkPackages(packageInput: [
                          {manager: "pip", package: "gtmunit1", version:"0.2.4"},
                          {manager: "pip", package: "gtmunit2", version:""}]){
                          id
                          manager 
                          package
                          version
                          isValid     
                        }
                      }
                    }
                """
        snapshot.assert_match(
            fixture_working_dir_env_repo_scoped[2].execute(query))
Code example #12
    def mutate_and_process_upload(cls, info, upload_file_path, upload_filename,
                                  **kwargs):
        if not upload_file_path:
            logger.error('No file uploaded')
            raise ValueError('No file uploaded')

        username = get_logged_in_username()
        owner = kwargs.get('owner')
        dataset_name = kwargs.get('dataset_name')
        file_path = kwargs.get('file_path')

        try:
            ds = InventoryManager().load_dataset(username,
                                                 owner,
                                                 dataset_name,
                                                 author=get_logged_in_author())
            with ds.lock():
                if not os.path.isabs(upload_file_path):
                    raise ValueError(
                        f"Source file `{upload_file_path}` not an absolute path"
                    )

                if not os.path.isfile(upload_file_path):
                    raise ValueError(
                        f"Source file does not exist at `{upload_file_path}`")

                manifest = Manifest(ds, username)
                full_dst = manifest.get_abs_path(file_path)

                # If file (hard link) already exists, remove it first so you don't write to all files with same content
                if os.path.isfile(full_dst):
                    os.remove(full_dst)

                full_dst_base = os.path.dirname(full_dst)
                if not os.path.isdir(full_dst_base):
                    pathlib.Path(full_dst_base).mkdir(parents=True,
                                                      exist_ok=True)

                shutil.move(upload_file_path, full_dst)
                file_info = manifest.gen_file_info(file_path)

        finally:
            try:
                logger.debug(f"Removing temp file {upload_file_path}")
                os.remove(upload_file_path)
            except FileNotFoundError:
                pass

        # Create data to populate edge
        create_data = {
            'owner': owner,
            'name': dataset_name,
            'key': file_info['key'],
            '_file_info': file_info
        }

        # TODO: Fix cursor implementation. this currently doesn't make sense when adding edges
        cursor = base64.b64encode(f"{0}".encode('utf-8'))
        return AddDatasetFile(new_dataset_file_edge=DatasetFileConnection.Edge(
            node=DatasetFile(**create_data), cursor=cursor))
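
The hard-link removal above matters because writing through one link mutates every path that shares the inode. A self-contained illustration with hypothetical file names:

    import os

    with open('a.txt', 'w') as f:      # original file
        f.write('original')
    os.link('a.txt', 'b.txt')          # b.txt now shares a.txt's inode

    # Removing b.txt before rewriting breaks the link, so a.txt is untouched
    os.remove('b.txt')
    with open('b.txt', 'w') as f:      # recreated as an independent file
        f.write('new content')

    assert open('a.txt').read() == 'original'
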
Code example #13
    def mutate_and_get_payload(cls,
                               root,
                               info,
                               owner,
                               dataset_name,
                               transaction_id,
                               cancel=False,
                               rollback=False,
                               client_mutation_id=None):
        username = get_logged_in_username()
        ds = InventoryManager().load_dataset(username,
                                             owner,
                                             dataset_name,
                                             author=get_logged_in_author())
        with ds.lock():
            if cancel and rollback:
                logger.warning(
                    f"Cancelled tx {transaction_id}, doing git reset")
                # TODO: Add ability to reset
            else:
                logger.info(
                    f"Done batch upload {transaction_id}, cancelled={cancel}")
                if cancel:
                    logger.warning("Sweeping aborted batch upload.")

                m = "Cancelled upload `{transaction_id}`. " if cancel else ''

                # Sweep up and process all files added during upload
                manifest = Manifest(ds, username)
                manifest.sweep_all_changes(upload=True, extra_msg=m)

        return CompleteDatasetUploadTransaction(success=True)
Code example #14
 def test_remove_secrets_entry(self, fixture_working_dir_env_repo_scoped):
     client = fixture_working_dir_env_repo_scoped[2]
     im = InventoryManager(fixture_working_dir_env_repo_scoped[0])
     lb = im.create_labbook("default", "default",
                            "unittest-mutation-remove-secret")
     SecretStore(lb, "default")['remove.key'] = '/mnt/nowhere'
     SecretStore(lb, "default")['absent.key'] = '/mnt/nowhere2'
     query = """
     mutation remove {
         removeSecretsEntry(input: {
             owner: "default",
             labbookName: "unittest-mutation-remove-secret",
             filename: "remove.key",
         }) {
             environment {
                 secretsFileMapping {
                     edges {
                         node {
                             filename
                             mountPath
                             isPresent
                         }
                     }
                 }
             }
         }
     }"""
     r = client.execute(query)
     assert 'errors' not in r
     n = r['data']['removeSecretsEntry']['environment'][
         'secretsFileMapping']['edges']
     assert len(n) == 1
     assert n[0]['node']['filename'] == 'absent.key'
     assert n[0]['node']['isPresent'] is False
Code example #15
def remote_labbook_repo():

    # TODO: Remove after integration tests with LFS support are available
    conf_file, working_dir = _create_temp_work_dir(lfs_enabled=False)
    im = InventoryManager(conf_file)
    lb = im.create_labbook('test',
                           'test',
                           'sample-repo-lb',
                           description="my first labbook")
    bm = BranchManager(lb, username='******')
    bm.create_branch('testing-branch')

    #with tempfile.TemporaryDirectory() as tmpdirname:
    with open(os.path.join('/tmp', 'codefile.c'), 'wb') as codef:
        codef.write(b'// Cody McCodeface ...')

    FileOperations.insert_file(lb, "code", "/tmp/codefile.c")

    assert lb.is_repo_clean
    bm.workon_branch('master')

    # Location of the repo to push/pull from
    yield lb.root_dir
    shutil.rmtree(working_dir)
    try:
        os.remove('/tmp/codefile.c')
    except OSError:
        pass
Code example #16
    def mutate_and_get_payload(cls,
                               root,
                               info,
                               name,
                               description,
                               repository,
                               base_id,
                               revision,
                               is_untracked=False,
                               client_mutation_id=None):
        username = get_logged_in_username()
        inv_manager = InventoryManager()
        if is_untracked:
            lb = inv_manager.create_labbook_disabled_lfs(
                username=username,
                owner=username,
                labbook_name=name,
                description=description,
                author=get_logged_in_author())
        else:
            lb = inv_manager.create_labbook(username=username,
                                            owner=username,
                                            labbook_name=name,
                                            description=description,
                                            author=get_logged_in_author())

        if is_untracked:
            FileOperations.set_untracked(lb, 'input')
            FileOperations.set_untracked(lb, 'output')
            input_set = FileOperations.is_set_untracked(lb, 'input')
            output_set = FileOperations.is_set_untracked(lb, 'output')
            if not (input_set and output_set):
                raise ValueError(
                    f'{str(lb)} untracking for input/output in malformed state'
                )
            if not lb.is_repo_clean:
                raise ValueError(
                    f'{str(lb)} should have clean Git state after setting for untracked'
                )

        adr = ActivityDetailRecord(ActivityDetailType.LABBOOK,
                                   show=False,
                                   importance=0)
        adr.add_value('text/plain', f"Created new LabBook: {username}/{name}")

        # Create activity record
        ar = ActivityRecord(ActivityType.LABBOOK,
                            message=f"Created new LabBook: {username}/{name}",
                            show=True,
                            importance=255,
                            linked_commit=lb.git.commit_hash)
        ar.add_detail_object(adr)

        store = ActivityStore(lb)
        store.create_activity_record(ar)

        cm = ComponentManager(lb)
        cm.add_base(repository, base_id, revision)

        return CreateLabbook(labbook=Labbook(owner=username, name=lb.name))
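
A hedged sketch of calling this mutation, in the style of the test examples; the createLabbook field name, camelCase input keys, and the repository/base identifiers are illustrative assumptions:

    query = """
            mutation myMutation {
              createLabbook(input: {name: "my-new-labbook", description: "Created from a base",
                                    repository: "example-base-repo", baseId: "example-base",
                                    revision: 1}) {
                  labbook {
                      id
                      name
                  }
              }
            }
            """
    result = client.execute(query)
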
Code example #17
File: query.py Project: Pandulis/gigantum-client
    def resolve_repository_name_is_available(self, info, name: str):
        """Resolver to check if a repository name is in use locally or remotely

        Args:
            info:
            name: desired name for the repository

        Returns:

        """
        # Check if repository exists locally
        logged_in_username = get_logged_in_username()
        im = InventoryManager()
        if im.repository_exists(logged_in_username, logged_in_username, name):
            return False

        # Check if repository exists remotely
        remote_config = Configuration().get_remote_configuration()
        auth_service = None
        remote = None
        if remote_config:
            auth_service = remote_config.get('admin_service')
            remote = remote_config.get('git_remote')

        # Get collaborators from remote service
        mgr = GitLabManager(remote, auth_service, flask.g.access_token)
        if mgr.repository_exists(logged_in_username, name):
            return False

        # If you get here the name is available
        return True
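
A hedged sketch of querying this resolver; the repositoryNameIsAvailable field name is assumed from graphene's snake_case-to-camelCase convention:

    query = """
            {
              repositoryNameIsAvailable(name: "my-new-project")
            }
            """
    result = client.execute(query)
    assert result['data']['repositoryNameIsAvailable'] in (True, False)
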
Code example #18
    def test_add_package_no_version(self, fixture_working_dir_env_repo_scoped,
                                    snapshot):
        """Test adding a package but omitting the version"""
        im = InventoryManager(fixture_working_dir_env_repo_scoped[0])
        lb = im.create_labbook('default',
                               'default',
                               'catbook-package-no-version',
                               description="LB to test package mutation")

        # Attempt to add a package without specifying a version
        pkg_query = """
        mutation myPkgMutation {
          addPackageComponents (input: {
            owner: "default",
            labbookName: "catbook-package-no-version",
            packages: [{manager: "pip3", package: "gtmunit1"}]           
            
          }) {
            clientMutationId
            newPackageComponentEdges {
                node{
                  id
                  manager
                  package
                  version
                  fromBase
                }
                cursor 
            }
          }
        }
        """
        result = fixture_working_dir_env_repo_scoped[2].execute(pkg_query)
        assert "errors" in result
        assert result['errors'][0]['message'] == "'version'"
Code example #19
File: container.py Project: jjwatts/gigantum-client
def build_lb_image_for_env(mock_config_with_repo):
    # Create a labbook
    im = InventoryManager(mock_config_with_repo[0])
    lb = im.create_labbook('unittester', 'unittester', "containerunittestbookenv",
                           description="Testing environment functions.")

    # Create Component Manager
    cm = ComponentManager(lb)
    # Add a component
    cm.add_base(ENV_UNIT_TEST_REPO, ENV_UNIT_TEST_BASE, ENV_UNIT_TEST_REV)

    ib = ImageBuilder(lb)
    ib.assemble_dockerfile(write=True)
    client = get_docker_client()
    client.containers.prune()

    try:
        lb, docker_image_id = ContainerOperations.build_image(labbook=lb, username="******")

        yield lb, 'unittester'

    finally:
        shutil.rmtree(lb.root_dir)

        # Remove image if it's still there
        try:
            client.images.remove(docker_image_id, force=True, noprune=False)
        except Exception:
            pass
Code example #20
    def test_add_package(self, fixture_working_dir_env_repo_scoped, snapshot):
        """Test listing labbooks"""
        im = InventoryManager(fixture_working_dir_env_repo_scoped[0])
        lb = im.create_labbook('default',
                               'default',
                               'catbook-package-tester',
                               description="LB to test package mutation")

        # Add a conda package
        pkg_query = """
        mutation myPkgMutation {
          addPackageComponents (input: {
            owner: "default",
            labbookName: "catbook-package-tester",
            packages: [{manager: "conda3", package: "python-coveralls", version: "2.9.1"}]           
            
          }) {
            clientMutationId
            newPackageComponentEdges {
                node{
                  id
                  manager
                  package
                  version
                  fromBase
                }
                cursor 
            }
          }
        }
        """
        snapshot.assert_match(
            fixture_working_dir_env_repo_scoped[2].execute(pkg_query))
Code example #21
File: labbook.py Project: Pandulis/gigantum-client
    def mutate_and_get_payload(cls,
                               root,
                               info,
                               owner,
                               labbook_name,
                               description_content,
                               client_mutation_id=None):
        username = get_logged_in_username()
        lb = InventoryManager().load_labbook(username,
                                             owner,
                                             labbook_name,
                                             author=get_logged_in_author())
        lb.description = description_content
        with lb.lock():
            lb.git.add(lb.config_path)
            commit = lb.git.commit('Updating description')

            adr = ActivityDetailRecord(ActivityDetailType.LABBOOK, show=False)
            adr.add_value('text/plain', "Updated description of Project")
            ar = ActivityRecord(ActivityType.LABBOOK,
                                message="Updated description of Project",
                                linked_commit=commit.hexsha,
                                tags=["labbook"],
                                show=False)
            ar.add_detail_object(adr)
            ars = ActivityStore(lb)
            ars.create_activity_record(ar)
        return SetLabbookDescription(success=True)
Code example #22
    def test_file_node(self, fixture_working_dir):
        """Test listing labbook favorites"""
        im = InventoryManager(fixture_working_dir[0])
        lb = im.create_labbook("default",
                               "default",
                               "labbook1",
                               description="my first labbook1")

        # Create a file in the code section
        with open(os.path.join(lb.root_dir, 'code', 'test1.txt'),
                  'wt') as test_file:
            test_file.write("blah1")

        query = """
                    {
                        node(id: "TGFiYm9va0ZpbGU6ZGVmYXVsdCZsYWJib29rMSZjb2RlJnRlc3QxLnR4dA==") {
                            ... on LabbookFile {
                                id
                                key
                                isDir
                                size
                            }
                        }
                    }
                    """

        r = fixture_working_dir[2].execute(query)
        assert 'errors' not in r
        assert r['data']['node']['isDir'] is False
        assert r['data']['node']['key'] == 'test1.txt'
        assert r['data']['node']['size'] == '5'
Code example #23
    def test_verify_unmanaged_dataset(self, fixture_working_dir_dataset_tests):
        im = InventoryManager(fixture_working_dir_dataset_tests[0])
        ds = im.create_dataset('default',
                               'default',
                               "adataset",
                               storage_type="local_filesystem",
                               description="100")
        dataset_dir = ds.root_dir
        assert os.path.exists(dataset_dir) is True
        flask.g.access_token = "asdf"
        flask.g.id_token = "1234"

        # configure backend and local dir
        working_dir = fixture_working_dir_dataset_tests[1]
        ds.backend.set_default_configuration('default', 'asdf', '1234')
        current_config = ds.backend_config
        current_config['Data Directory'] = "test_dir"
        ds.backend_config = current_config
        test_dir = os.path.join(working_dir, "local_data", "test_dir")
        os.makedirs(test_dir)
        with open(os.path.join(test_dir, "test.txt"), 'wt') as temp:
            temp.write('dummy data: asdfasdf')

        query = """
                    mutation myMutation{
                      verifyDataset(input: {datasetOwner: "default", datasetName: "adataset"}) {
                          backgroundJobKey
                      }
                    }
                """
        result = fixture_working_dir_dataset_tests[2].execute(query)
        assert "errors" not in result
        assert "rq:job" in result['data']['verifyDataset']['backgroundJobKey']
Code example #24
    def test_node_labbook_from_object(self, fixture_working_dir):
        im = InventoryManager(fixture_working_dir[0])
        lb = im.create_labbook("default",
                               "default",
                               "cat-lab-book1",
                               description="Test cat labbook from obj")

        query = """
                {
                    node(id: "TGFiYm9vazpkZWZhdWx0JmNhdC1sYWItYm9vazE=") {
                        ... on Labbook {
                            name
                            description
                            activeBranchName
                        }
                        id
                    }
                }
                """

        r = fixture_working_dir[2].execute(query)
        assert r['data']['node']['description'] == 'Test cat labbook from obj'
        assert r['data']['node'][
            'id'] == 'TGFiYm9vazpkZWZhdWx0JmNhdC1sYWItYm9vazE='
        assert r['data']['node']['name'] == 'cat-lab-book1'
        assert r['data']['node']['activeBranchName'] == 'master'
Code example #25
    def test_readme(self, fixture_working_dir_env_repo_scoped, snapshot):
        """Test getting a labbook's readme document"""
        # Create labbook
        im = InventoryManager(fixture_working_dir_env_repo_scoped[0])
        lb = im.create_labbook("default",
                               "default",
                               "labbook77",
                               description="my first labbook10000")

        query = """
                    {
                      labbook(owner: "default", name: "labbook77") {
                        overview {
                          readme
                        }
                      }
                    }
                    """
        snapshot.assert_match(
            fixture_working_dir_env_repo_scoped[2].execute(query))

        lb.write_readme("##Summary\nThis is my readme!!")

        snapshot.assert_match(
            fixture_working_dir_env_repo_scoped[2].execute(query))
Code example #26
    def test_cuda_version_property(self, mock_config_with_repo):
        """Test getting the cuda version"""
        im = InventoryManager(mock_config_with_repo[0])
        lb = im.create_labbook('test',
                               'test',
                               'labbook1',
                               description="my first labbook")

        assert lb.cuda_version is None

        # Add base without GPU support
        cm = ComponentManager(lb)
        cm.add_base(ENV_UNIT_TEST_REPO, ENV_UNIT_TEST_BASE, ENV_UNIT_TEST_REV)
        base_yaml_file = glob.glob(
            os.path.join(lb.root_dir, '.gigantum', 'env', 'base', '*.yaml'))[0]

        assert lb.cuda_version is None

        # Fake a version
        with open(base_yaml_file, 'rt') as bf:
            base_data = yaml.safe_load(bf)

        base_data['cuda_version'] = '10.0'

        with open(base_yaml_file, 'wt') as bf:
            yaml.safe_dump(base_data, bf)

        assert lb.cuda_version == '10.0'
Code example #27
 def test_query_owner(self, mock_config_file):
     inv_manager = InventoryManager(mock_config_file[0])
     lb = inv_manager.create_labbook("test",
                                     "test",
                                     "labbook1",
                                     description="my first labbook")
     assert "test" == inv_manager.query_owner(lb)
Code example #28
def mock_create_labbooks(fixture_working_dir):
    # Create a labbook in the temporary directory
    config_file = fixture_working_dir[0]
    im = InventoryManager(fixture_working_dir[0])
    lb = im.create_labbook(UT_USERNAME,
                           UT_USERNAME,
                           UT_LBNAME,
                           description="Cats labbook 1")

    # Create a file in the dir
    with open(os.path.join(fixture_working_dir[1], 'unittest-examplefile'),
              'w') as sf:
        sf.write("test data")
        sf.seek(0)
    FileOperations.insert_file(lb, 'code', sf.name)

    assert os.path.isfile(
        os.path.join(lb.root_dir, 'code', 'unittest-examplefile'))

    # Create test client
    schema = graphene.Schema(query=LabbookQuery, mutation=LabbookMutations)
    with patch.object(Configuration, 'find_default_config',
                      lambda self: config_file):
        app = Flask("lmsrvlabbook")
        app.config["LABMGR_CONFIG"] = Configuration()
        app.config["LABMGR_ID_MGR"] = get_identity_manager(Configuration())
        with app.app_context():
            flask.g.user_obj = app.config["LABMGR_ID_MGR"].get_user_profile()
            client = Client(
                schema,
                middleware=[DataloaderMiddleware(), error_middleware],
                context_value=ContextMock())
            yield lb, client, schema
    shutil.rmtree(fixture_working_dir[1], ignore_errors=True)
Code example #29
 def test_create_dataset_invalid_storage_type(self, mock_config_file):
     inv_manager = InventoryManager(mock_config_file[0])
     auth = GitAuthor(name="username", email="*****@*****.**")
     with pytest.raises(ValueError):
         inv_manager.create_dataset("test", "test", "dataset1", "asdfdfgh",
                                    description="my first dataset",
                                    author=auth)
Code example #30
    def test_link_unlink_dataset_with_repair(self, mock_labbook):
        inv_manager = InventoryManager(mock_labbook[0])
        lb = mock_labbook[2]
        ds = inv_manager.create_dataset("test",
                                        "test",
                                        "dataset100",
                                        "gigantum_object_v1",
                                        description="my dataset")

        # Fake publish to a local bare repo
        _MOCK_create_remote_repo2(ds, 'test', None, None)

        assert os.path.exists(os.path.join(lb.root_dir,
                                           '.gitmodules')) is False

        dataset_submodule_dir = os.path.join(lb.root_dir, '.gigantum',
                                             'datasets', 'test', 'dataset100')
        git_module_dir = os.path.join(lb.root_dir, '.git', 'modules',
                                      f"test&dataset100")

        # Add dirs as if lingering submodule config
        os.makedirs(dataset_submodule_dir)
        os.makedirs(git_module_dir)

        inv_manager.link_dataset_to_labbook(ds.remote, 'test', 'dataset100',
                                            lb)

        assert os.path.exists(os.path.join(lb.root_dir, '.gitmodules')) is True
        assert os.path.exists(dataset_submodule_dir) is True
        assert os.path.exists(os.path.join(dataset_submodule_dir,
                                           '.gigantum')) is True
        with open(os.path.join(lb.root_dir, '.gitmodules'), 'rt') as mf:
            data = mf.read()

        assert len(data) > 0