def mock_create_labbooks(fixture_working_dir):
    """Fixture helper: create a LabBook containing one code file, plus a
    GraphQL test client bound to a temp config.

    Args:
        fixture_working_dir: tuple of (config file path, temp working directory)

    Yields:
        (LabBook, graphene test Client, graphene Schema)
    """
    # Create a labbook in the temporary directory
    config_file = fixture_working_dir[0]
    lb = LabBook(fixture_working_dir[0])
    lb.new(owner={"username": UT_USERNAME},
           name=UT_LBNAME,
           description="Cats labbook 1")

    # Create a file in the dir and insert it into the labbook's code section
    example_file = os.path.join(fixture_working_dir[1], 'unittest-examplefile')
    with open(example_file, 'w') as sf:
        sf.write("test data")
    FileOperations.insert_file(lb, 'code', example_file)

    assert os.path.isfile(
        os.path.join(lb.root_dir, 'code', 'unittest-examplefile'))

    # Create test client
    schema = graphene.Schema(query=LabbookQuery, mutation=LabbookMutations)
    with patch.object(Configuration, 'find_default_config',
                      lambda self: config_file):
        app = Flask("lmsrvlabbook")
        app.config["LABMGR_CONFIG"] = Configuration()
        app.config["LABMGR_ID_MGR"] = get_identity_manager(Configuration())
        with app.app_context():
            flask.g.user_obj = app.config["LABMGR_ID_MGR"].get_user_profile()
            client = Client(
                schema,
                middleware=[LabBookLoaderMiddleware(), error_middleware],
                context_value=ContextMock())
            yield lb, client, schema
    # BUG FIX: fixture_working_dir is a (config, dir) tuple; rmtree must be
    # given the directory path, not the tuple.
    shutil.rmtree(fixture_working_dir[1], ignore_errors=True)
# Beispiel #2 (scrape-artifact separator)
def fixture_working_dir_with_cached_user():
    """A pytest fixture that creates a temporary working directory, config file, schema, and local user identity

    Yields:
        (str, str): path to the generated config file, path to the temporary
        working directory. Teardown removes the temp directory.
    """
    # Create temp dir
    config_file, temp_dir = _create_temp_work_dir()

    # Create user identity cache file so the identity manager can load a
    # local ("cached") user instead of contacting an auth service
    user_dir = os.path.join(temp_dir, '.labmanager', 'identity')
    os.makedirs(user_dir)
    with open(os.path.join(user_dir, 'user.json'), 'wt') as user_file:
        json.dump(
            {
                "username": "******",
                "email": "*****@*****.**",
                "given_name": "Jane",
                "family_name": "Doe"
            }, user_file)

    # Patch Configuration so any code constructing one picks up the temp config
    with patch.object(Configuration, 'find_default_config',
                      lambda self: config_file):
        app = Flask("lmsrvlabbook")

        # Load configuration class into the flask application
        app.config["LABMGR_CONFIG"] = Configuration()
        app.config["LABMGR_ID_MGR"] = get_identity_manager(Configuration())

        with app.app_context():
            # within this block, current_app points to app.
            yield config_file, temp_dir  # name of the config file, temporary working directory

    # Remove the temp_dir (runs on teardown, after the patch context exits)
    shutil.rmtree(temp_dir)
# Beispiel #3 (scrape-artifact separator)
 def resolve_build_info(self, info):
     """Return this LabManager build info (application, revision hash, build timestamp)
     joined as 'application-revision-built_on'."""
     build_info = Configuration().config.get('build_info') or {}
     # BUG FIX: the fallback labels were swapped -- 'application' fell back to
     # 'UnknownDate' and 'built_on' to 'UnknownApplication'. Each key now
     # falls back to the label that matches it.
     return '-'.join([
         build_info.get('application', 'UnknownApplication'),
         build_info.get('revision', 'UnknownHash'),
         build_info.get('built_on', 'UnknownDate')
     ])
def fixture_working_dir_env_repo_scoped():
    """A pytest fixture that creates a temporary working directory, a config file to match, creates the schema,
    and populates the environment component repository.
    Class scope modifier attached

    Yields:
        (str, str, Client, Schema): config file path, temp working directory,
        graphene test client, graphene schema. Teardown removes the temp dir.
    """
    # Create temp dir
    config_file, temp_dir = _create_temp_work_dir()

    # Create user identity cache file so the identity manager can load a
    # local user without contacting an auth service
    user_dir = os.path.join(temp_dir, '.labmanager', 'identity')
    os.makedirs(user_dir)
    with open(os.path.join(user_dir, 'user.json'), 'wt') as user_file:
        json.dump(
            {
                "username": "******",
                "email": "*****@*****.**",
                "given_name": "Jane",
                "family_name": "Doe"
            }, user_file)

    # Create test client
    schema = graphene.Schema(query=LabbookQuery, mutation=LabbookMutations)

    # get environment data and index it so environment queries return content
    erm = RepositoryManager(config_file)
    erm.update_repositories()
    erm.index_repositories()

    # Patch Configuration so any code constructing one reads the temp config
    with patch.object(Configuration, 'find_default_config',
                      lambda self: config_file):
        # Load User identity into app context
        app = Flask("lmsrvlabbook")
        app.config["LABMGR_CONFIG"] = Configuration()
        app.config["LABMGR_ID_MGR"] = get_identity_manager(Configuration())

        with app.app_context():
            # within this block, current_app points to app. Set current user explicitly (this is done in the middleware)
            flask.g.user_obj = app.config["LABMGR_ID_MGR"].get_user_profile()

            # Create a test client
            client = Client(
                schema,
                middleware=[LabBookLoaderMiddleware(), error_middleware],
                context_value=ContextMock())

            yield config_file, temp_dir, client, schema  # name of the config file, temporary working directory, the schema

    # Remove the temp_dir (teardown after the test resumes the generator)
    shutil.rmtree(temp_dir)
# Beispiel #5 (scrape-artifact separator)
    def mutate_and_get_payload(cls,
                               root,
                               info,
                               owner,
                               labbook_name,
                               client_mutation_id=None):
        """Publish the labbook to its remote on behalf of the logged-in user."""
        # Locate and load the labbook on disk
        username = get_logged_in_username()
        working_directory = Configuration().config['git']['working_directory']
        labbook_path = os.path.join(working_directory, username, owner,
                                    'labbooks', labbook_name)
        lb = LabBook(author=get_logged_in_author())
        lb.from_directory(labbook_path)

        # A valid Bearer token is required to publish
        environ = info.context.headers.environ
        if "HTTP_AUTHORIZATION" not in environ:
            raise ValueError(
                "Authorization header not provided. Must have a valid session to query for collaborators"
            )
        token = parse_token(environ["HTTP_AUTHORIZATION"])

        # BVB -- Should this defer to `sync` if Labbook's remote is already set?
        # Otherwise, it will throw an exception, which may still be ok.
        GitWorkflow(labbook=lb).publish(username=username, access_token=token)

        return PublishLabbook(success=True)
    def mutate_and_get_payload(cls,
                               root,
                               info,
                               owner,
                               labbook_name,
                               client_mutation_id=None):
        """Schedule a background job exporting the labbook as a zip archive."""
        username = get_logged_in_username()
        logger.info(f'Exporting LabBook: {username}/{owner}/{labbook_name}')

        # Resolve the labbook directory under the configured working directory
        working_directory = Configuration().config['git']['working_directory']
        labbook_dir = os.path.join(working_directory, username, owner,
                                   'labbooks', labbook_name)
        lb = LabBook(author=get_logged_in_author())
        lb.from_directory(labbook_dir)

        # Hand the export off to the dispatcher as a background task
        job_key = Dispatcher().dispatch_task(
            jobs.export_labbook_as_zip,
            kwargs={
                'labbook_path': lb.root_dir,
                'lb_export_directory': os.path.join(working_directory,
                                                    'export')
            },
            metadata={'method': 'export_labbook_as_zip', 'labbook': lb.key})
        logger.info(
            f"Exporting LabBook {lb.root_dir} in background job with key {job_key.key_str}"
        )

        return ExportLabbook(job_key=job_key.key_str)
    def mutate_and_get_payload(cls,
                               root,
                               info,
                               owner,
                               labbook_name,
                               confirm,
                               client_mutation_id=None):
        """Delete a labbook from the remote git server; dry run unless
        confirm is exactly True."""
        if confirm is not True:
            logger.info(
                f"Dry run deleting {labbook_name} from remote repository -- not deleted."
            )
            return DeleteLabbook(success=False)

        # Load config data
        configuration = Configuration().config

        # Extract valid Bearer token
        token = None
        if hasattr(info.context.headers, 'environ'):
            if "HTTP_AUTHORIZATION" in info.context.headers.environ:
                token = parse_token(
                    info.context.headers.environ["HTTP_AUTHORIZATION"])
        if not token:
            raise ValueError(
                "Authorization header not provided. Cannot perform remote delete operation."
            )

        # Look up the admin service configured for the default remote
        default_remote = configuration['git']['default_remote']
        remotes = configuration['git']['remotes']
        admin_service = (remotes[default_remote]['admin_service']
                         if default_remote in remotes else None)
        if not admin_service:
            raise ValueError('admin_service could not be found')

        # Perform delete operation on the remote server
        mgr = GitLabManager(default_remote,
                            admin_service,
                            access_token=token)
        mgr.remove_labbook(owner, labbook_name)
        logger.info(
            f"Deleted {owner}/{labbook_name} from the remote repository {default_remote}"
        )

        # Remove locally any references to that cloud repo that's just been deleted.
        try:
            lb = LabBook()
            lb.from_name(get_logged_in_username(), owner, labbook_name)
            lb.remove_remote()
            lb.remove_lfs_remotes()
        except ValueError as e:
            logger.warning(e)

        return DeleteLabbook(success=True)
def fixture_working_dir_lfs_disabled():
    """A pytest fixture that creates a temporary working directory, config file, schema, and local user identity

    Git LFS is disabled in the generated config.

    Yields:
        (str, str, Client, Schema): config file path, temp working directory,
        graphene test client, graphene schema. Teardown removes the temp dir.
    """
    # Create temp dir with LFS disabled in its config
    config_file, temp_dir = _create_temp_work_dir(lfs_enabled=False)

    # Create user identity cache file so the identity manager can load a
    # local user without contacting an auth service
    user_dir = os.path.join(temp_dir, '.labmanager', 'identity')
    os.makedirs(user_dir)
    with open(os.path.join(user_dir, 'user.json'), 'wt') as user_file:
        json.dump(
            {
                "username": "******",
                "email": "*****@*****.**",
                "given_name": "Jane",
                "family_name": "Doe"
            }, user_file)

    # Create test client
    schema = graphene.Schema(query=LabbookQuery, mutation=LabbookMutations)

    # Patch Configuration so any code constructing one reads the temp config
    with patch.object(Configuration, 'find_default_config',
                      lambda self: config_file):
        # Load User identity into app context
        app = Flask("lmsrvlabbook")
        app.config["LABMGR_CONFIG"] = Configuration()
        app.config["LABMGR_ID_MGR"] = get_identity_manager(Configuration())

        with app.app_context():
            # within this block, current_app points to app. Set current user explicitly (this is done in the middleware)
            flask.g.user_obj = app.config["LABMGR_ID_MGR"].get_user_profile()

            # Create a test client
            client = Client(schema,
                            middleware=[LabBookLoaderMiddleware()],
                            context_value=ContextMock())

            yield config_file, temp_dir, client, schema  # name of the config file, temporary working directory, the schema

    # Remove the temp_dir (teardown after the test resumes the generator)
    shutil.rmtree(temp_dir)
    def mutate_and_process_upload(cls, info, **kwargs):
        """Finish an ImportLabbook chunked upload: queue a background import
        job, then a dependent image-build job for the imported labbook.

        Relies on cls.upload_file_path / cls.filename set by the
        chunked-upload machinery; raises ValueError if no file arrived.
        """
        if not cls.upload_file_path:
            logger.error('No file uploaded')
            raise ValueError('No file uploaded')

        username = get_logged_in_username()
        logger.info(f"Handling ImportLabbook mutation: user={username},"
                    f"owner={username}. Uploaded file {cls.upload_file_path}")

        # Queue the archive import as a background job
        job_metadata = {'method': 'import_labbook_from_zip'}
        job_kwargs = {
            'archive_path': cls.upload_file_path,
            'username': username,
            'owner': username,
            'base_filename': cls.filename
        }
        dispatcher = Dispatcher()
        # NOTE(review): 'import_labboook_from_zip' (triple 'o') looks like a
        # typo, but it must match the job function's actual name -- verify
        # against the jobs module before "fixing" it.
        job_key = dispatcher.dispatch_task(jobs.import_labboook_from_zip,
                                           kwargs=job_kwargs,
                                           metadata=job_metadata)
        logger.info(
            f"Importing LabBook {cls.upload_file_path} in background job with key {job_key.key_str}"
        )

        # Infer the labbook name from the archive filename ('name_*.lbk')
        assumed_lb_name = cls.filename.replace('.lbk', '').split('_')[0]
        working_directory = Configuration().config['git']['working_directory']
        inferred_lb_directory = os.path.join(working_directory, username,
                                             username, 'labbooks',
                                             assumed_lb_name)
        build_img_kwargs = {
            'path': inferred_lb_directory,
            'username': username,
            'nocache': True
        }
        build_img_metadata = {
            'method': 'build_image',
            # TODO - we need labbook key but labbook is not available...
            'labbook': f"{username}|{username}|{assumed_lb_name}"
        }
        logger.warning(
            f"Using assumed labbook name {build_img_metadata['labbook']}, better solution needed."
        )
        # The image build only runs after the import job completes
        build_image_job_key = dispatcher.dispatch_task(
            jobs.build_labbook_image,
            kwargs=build_img_kwargs,
            dependent_job=job_key,
            metadata=build_img_metadata)
        logger.info(f"Adding dependent job {build_image_job_key} to build "
                    f"Docker image for labbook `{inferred_lb_directory}`")

        return ImportLabbook(import_job_key=job_key.key_str,
                             build_image_job_key=build_image_job_key.key_str)
    def mutate_and_process_upload(cls,
                                  info,
                                  owner,
                                  labbook_name,
                                  section,
                                  file_path,
                                  chunk_upload_params,
                                  transaction_id,
                                  client_mutation_id=None):
        """Finish an AddLabbookFile chunked upload: move the uploaded temp
        file into the labbook section and return a file edge for Relay.

        The temp upload file is always removed (see finally); if put_file
        fails, the exception propagates after cleanup.
        """
        if not cls.upload_file_path:
            logger.error('No file uploaded')
            raise ValueError('No file uploaded')

        try:
            username = get_logged_in_username()
            working_directory = Configuration().config['git'] \
                ['working_directory']
            inferred_lb_directory = os.path.join(working_directory, username,
                                                 owner, 'labbooks',
                                                 labbook_name)
            lb = LabBook(author=get_logged_in_author())
            lb.from_directory(inferred_lb_directory)
            # Destination keeps the caller's directory but uses the uploaded filename
            dstpath = os.path.join(os.path.dirname(file_path), cls.filename)

            fops = FileOperations.put_file(labbook=lb,
                                           section=section,
                                           src_file=cls.upload_file_path,
                                           dst_path=dstpath,
                                           txid=transaction_id)
        finally:
            # Always clean up the temp upload; a missing file is fine
            try:
                logger.debug(f"Removing temp file {cls.upload_file_path}")
                os.remove(cls.upload_file_path)
            except FileNotFoundError:
                pass

        # Create data to populate edge (fops is bound here -- any failure
        # above propagated out of the try/finally)
        create_data = {
            'owner': owner,
            'name': labbook_name,
            'section': section,
            'key': fops['key'],
            '_file_info': fops
        }

        # TODO: Fix cursor implementation..
        # this currently doesn't make sense when adding edges
        cursor = base64.b64encode(f"{0}".encode('utf-8'))
        return AddLabbookFile(new_labbook_file_edge=LabbookFileConnection.Edge(
            node=LabbookFile(**create_data), cursor=cursor))
# Beispiel #11 (scrape-artifact separator)
    def mutate_and_get_payload(cls, root, input, client_mutation_id=None):
        """Log out the current user by removing the cached identity file."""
        # Path of the locally-cached identity, if one exists
        identity_file = os.path.join(Configuration().config['git']['working_directory'],
                                     '.labmanager', 'identity', 'user.json')
        if not os.path.exists(identity_file):
            # Nothing cached locally -- note it and continue
            logger.warning("Attempted to remove user identity, but no identity is stored locally.")
        else:
            os.remove(identity_file)

        # Wipe current user from session
        flask.g.user_obj = None

        return RemoveUserIdentity(user_identity_edge=UserIdentity())
# Beispiel #12 (scrape-artifact separator)
    def mutate_and_get_payload(cls,
                               root,
                               info,
                               owner,
                               labbook_name,
                               force=False,
                               client_mutation_id=None):
        """Sync the labbook with its remote and return the update count."""
        # Load the labbook from disk
        username = get_logged_in_username()
        working_directory = Configuration().config['git']['working_directory']
        labbook_dir = os.path.join(working_directory, username, owner,
                                   'labbooks', labbook_name)
        lb = LabBook(author=get_logged_in_author())
        lb.from_directory(labbook_dir)

        # Extract valid Bearer token
        token = None
        headers = info.context.headers
        if hasattr(headers, 'environ') and \
                "HTTP_AUTHORIZATION" in headers.environ:
            token = parse_token(headers.environ["HTTP_AUTHORIZATION"])
        if not token:
            raise ValueError(
                "Authorization header not provided. Must have a valid session to query for collaborators"
            )

        # Find the admin service configured for the default remote
        git_config = lb.labmanager_config.config['git']
        default_remote = git_config['default_remote']
        admin_service = None
        for remote_name in git_config['remotes']:
            if remote_name == default_remote:
                admin_service = git_config['remotes'][remote_name][
                    'admin_service']
                break
        if not admin_service:
            raise ValueError('admin_service could not be found')

        # Configure git creds
        mgr = GitLabManager(default_remote, admin_service, access_token=token)
        mgr.configure_git_credentials(default_remote, username)

        cnt = GitWorkflow(labbook=lb).sync(username=username, force=force)

        # Create an updated graphene Labbook instance to return for convenience of Relay.
        updated = LabbookObject(owner=owner, name=labbook_name)
        return SyncLabbook(update_count=cnt, updated_labbook=updated)
    def prior_mutate_and_get_payload(cls,
                                     root,
                                     info,
                                     owner,
                                     original_labbook_name,
                                     new_labbook_name,
                                     client_mutation_id=None):
        """Rename a labbook and rebuild its Docker image (legacy code).

        NOTE!!! This is the code that was originally to rename.
        Temporarily, rename functionality is disabled.
        """
        username = get_logged_in_username()

        # Load the labbook from its current (pre-rename) location
        working_directory = Configuration().config['git']['working_directory']
        labbook_dir = os.path.join(working_directory, username, owner,
                                   'labbooks', original_labbook_name)
        lb = LabBook(author=get_logged_in_author())
        lb.from_directory(labbook_dir)

        # Docker image tags before and after the rename
        old_tag = '{}-{}-{}'.format(username, owner, original_labbook_name)
        new_tag = '{}-{}-{}'.format(username, owner, new_labbook_name)

        # Rename LabBook on disk
        lb.rename(new_labbook_name)
        logger.info(
            f"Renamed LabBook from `{original_labbook_name}` to `{new_labbook_name}`"
        )

        # Build image with new name...should be fast and use the Docker cache
        docker_client = get_docker_client()
        ImageBuilder(lb.root_dir).build_image(docker_client=docker_client,
                                              image_tag=new_tag,
                                              username=username,
                                              background=True)

        # Delete old image if it had previously been built successfully
        try:
            docker_client.images.get(old_tag)
            docker_client.images.remove(old_tag)
        except ImageNotFound:
            logger.warning(
                f"During renaming, original image {old_tag} not found, removal skipped."
            )

        return RenameLabbook(success=True)
def _create_temp_work_dir(lfs_enabled: bool = True):
    """Helper method to create a temporary working directory and associated config file"""
    # Fresh, uniquely-named directory under the system temp root
    temp_dir = os.path.join(tempfile.gettempdir(), uuid.uuid4().hex)
    os.makedirs(temp_dir)

    config = Configuration()
    cfg = config.config
    # Make sure the "test" environment components are always used
    cfg["environment"]["repo_url"] = [
        "https://github.com/gig-dev/components2.git"
    ]
    cfg["flask"]["DEBUG"] = False
    # Point git at the new temp dir and select the requested LFS mode
    cfg["git"]["working_directory"] = temp_dir
    cfg["git"]["lfs_enabled"] = lfs_enabled
    # Set the auth0 client to the test client (only contains 1 test user and is partitioned from prod)
    cfg["auth"]["audience"] = "io.gigantum.api.dev"

    config_file = os.path.join(temp_dir, "temp_config.yaml")
    config.save(config_file)
    os.environ['HOST_WORK_DIR'] = temp_dir

    return config_file, temp_dir
# Beispiel #15 (scrape-artifact separator)
def _create_temp_work_dir():
    """Helper method to create a temporary working directory and associated config file"""
    # Fresh, uniquely-named directory under the system temp root
    work_dir = os.path.join(tempfile.gettempdir(), uuid.uuid4().hex)
    os.makedirs(work_dir)
    config_path = os.path.join(work_dir, "temp_config.yaml")

    # Point git at the temp dir and use the partitioned test auth0 audience
    config = Configuration()
    config.config["git"]["working_directory"] = work_dir
    config.config["auth"]["audience"] = "io.gigantum.api.dev"
    config.save(config_path)

    return config_path, work_dir
    def mutate_and_get_payload(cls,
                               root,
                               info,
                               owner,
                               labbook_name,
                               no_cache=False,
                               client_mutation_id=None):
        """Kick off a background Docker image build for the labbook."""
        username = get_logged_in_username()

        # Refuse to rebuild while the labbook's container is running
        if BuildImage.get_container_status(labbook_name, owner, username):
            raise ValueError(
                f'Cannot build image for running container {owner}/{labbook_name}'
            )

        working_dir = Configuration().config['git']['working_directory']
        labbook_dir = os.path.expanduser(
            os.path.join(working_dir, username, owner, 'labbooks',
                         labbook_name))

        lb = LabBook(author=get_logged_in_author())
        lb.from_directory(labbook_dir)

        # Generate Dockerfile
        ImageBuilder(lb).assemble_dockerfile(write=True)

        # Kick off building in a background thread
        res = Dispatcher().dispatch_task(
            jobs.build_labbook_image,
            kwargs={
                'path': labbook_dir,
                'username': username,
                'nocache': no_cache
            },
            metadata={'labbook': lb.key, 'method': 'build_image'})

        return BuildImage(environment=Environment(owner=owner,
                                                  name=labbook_name),
                          background_job_key=res.key_str)
    def mutate_and_get_payload(cls,
                               root,
                               info,
                               owner,
                               labbook_name,
                               section,
                               file_path,
                               is_directory=False,
                               client_mutation_id=None):
        """Delete a file (or directory) from a section of the labbook."""
        username = get_logged_in_username()
        root_dir = Configuration().config['git']['working_directory']
        labbook_dir = os.path.join(root_dir, username, owner, 'labbooks',
                                   labbook_name)
        lb = LabBook(author=get_logged_in_author())
        lb.from_directory(labbook_dir)

        # Remove the target path from the given section
        lb.delete_file(section=section,
                       relative_path=file_path,
                       directory=is_directory)

        return DeleteLabbookFile(success=True)
 def mutate_and_get_payload(cls,
                            root,
                            info,
                            owner,
                            labbook_name,
                            transaction_id,
                            cancel=False,
                            rollback=False,
                            client_mutation_id=None):
     """Finish (or cancel / roll back) a chunked-upload batch transaction."""
     username = get_logged_in_username()
     working_directory = Configuration().config['git']['working_directory']
     labbook_dir = os.path.join(working_directory, username, owner,
                                'labbooks', labbook_name)
     lb = LabBook(author=get_logged_in_author())
     lb.from_directory(labbook_dir)

     # Close out the batch, optionally cancelling and rolling back changes
     FileOperations.complete_batch(lb,
                                   transaction_id,
                                   cancel=cancel,
                                   rollback=rollback)
     return CompleteBatchUploadTransaction(success=True)
    def mutate_and_get_payload(cls,
                               root,
                               info,
                               owner,
                               labbook_name,
                               section,
                               directory,
                               client_mutation_id=None):
        """Create a directory in a labbook section and return its file edge."""
        username = get_logged_in_username()

        working_directory = Configuration().config['git']['working_directory']
        labbook_dir = os.path.join(working_directory, username, owner,
                                   'labbooks', labbook_name)
        lb = LabBook(author=get_logged_in_author())
        lb.from_directory(labbook_dir)

        # Make the directory and record an activity entry for it
        lb.makedir(os.path.join(section, directory),
                   create_activity_record=True)
        logger.info(f"Made new directory in `{directory}`")

        # Prime dataloader with labbook you already loaded
        dataloader = LabBookLoader()
        dataloader.prime(f"{owner}&{labbook_name}&{lb.name}", lb)

        # Create data to populate edge
        file_info = lb.get_file_info(section, directory)
        edge_data = {
            'owner': owner,
            'name': labbook_name,
            'section': section,
            'key': file_info['key'],
            '_file_info': file_info
        }

        # TODO: Fix cursor implementation, this currently doesn't make sense
        cursor = base64.b64encode(f"{0}".encode('utf-8'))

        return MakeLabbookDirectory(
            new_labbook_file_edge=LabbookFileConnection.Edge(
                node=LabbookFile(**edge_data), cursor=cursor))
    def mutate_and_get_payload(cls,
                               root,
                               info,
                               owner,
                               labbook_name,
                               confirm,
                               client_mutation_id=None):
        """Delete a local labbook (Docker image + working copy) when confirm
        is truthy; otherwise log a dry run and leave it untouched.
        """
        username = get_logged_in_username()
        working_directory = Configuration().config['git']['working_directory']
        inferred_lb_directory = os.path.join(working_directory, username,
                                             owner, 'labbooks', labbook_name)
        lb = LabBook(author=get_logged_in_author())
        lb.from_directory(inferred_lb_directory)

        if confirm:
            logger.warning(f"Deleting {str(lb)}...")
            # Best-effort stop of the container; an OSError here is ignored
            try:
                lb, stopped = ContainerOperations.stop_container(
                    labbook=lb, username=username)
            except OSError:
                pass
            # Image removal must succeed before the on-disk copy is deleted
            lb, docker_removed = ContainerOperations.delete_image(
                labbook=lb, username=username)
            if not docker_removed:
                raise ValueError(
                    f'Cannot delete docker image for {str(lb)} - unable to delete LB from disk'
                )
            shutil.rmtree(lb.root_dir, ignore_errors=True)
            # Verify the working copy is actually gone before reporting success
            if os.path.exists(lb.root_dir):
                logger.error(
                    f'Deleted {str(lb)} but root directory {lb.root_dir} still exists!'
                )
                return DeleteLabbook(success=False)
            else:
                return DeleteLabbook(success=True)
        else:
            logger.info(f"Dry run in deleting {str(lb)} -- not deleted.")
            return DeleteLabbook(success=False)
def fixture_working_dir_populated_scoped():
    """A pytest fixture that creates a temporary working directory, a config file to match, creates the schema,
    and populates the environment component repository.
    Class scope modifier attached

    Yields:
        tuple: (config_file, temp_dir, graphene test client, schema)
    """
    # Create temp dir
    config_file, temp_dir = _create_temp_work_dir()

    # Create user identity
    user_dir = os.path.join(temp_dir, '.labmanager', 'identity')
    os.makedirs(user_dir)
    with open(os.path.join(user_dir, 'user.json'), 'wt') as user_file:
        json.dump(
            {
                "username": "******",
                "email": "*****@*****.**",
                "given_name": "Jane",
                "family_name": "Doe"
            }, user_file)

    # Create test client
    schema = graphene.Schema(query=LabbookQuery, mutation=LabbookMutations)

    # Create a bunch of lab books. The 1.1s pause between creations keeps
    # creation/modification timestamps distinct so date-sorted listings
    # are deterministic in tests.
    lb = LabBook(config_file)
    labbook_specs = [
        ("labbook1", "Cats labbook 1"),
        ("labbook2", "Dogs labbook 2"),
        ("labbook3", "Mice labbook 3"),
        ("labbook4", "Horses labbook 4"),
        ("labbook5", "Cheese labbook 5"),
        ("labbook6", "Goat labbook 6"),
        ("labbook7", "Turtle labbook 7"),
        ("labbook8", "Lamb labbook 8"),
        ("labbook9", "Taco labbook 9"),
        ("labbook-0", "This should not show up."),
    ]
    for idx, (name, description) in enumerate(labbook_specs):
        if idx:
            time.sleep(1.1)
        lb.new(owner={"username": "******"},
               name=name,
               description=description)

    with patch.object(Configuration, 'find_default_config',
                      lambda self: config_file):
        # Load User identity into app context
        app = Flask("lmsrvlabbook")
        app.config["LABMGR_CONFIG"] = Configuration()
        app.config["LABMGR_ID_MGR"] = get_identity_manager(Configuration())

        with app.app_context():
            # within this block, current_app points to app. Set current user explicitly (this is done in the middleware)
            flask.g.user_obj = app.config["LABMGR_ID_MGR"].get_user_profile()

            # Create a test client
            client = Client(schema,
                            middleware=[LabBookLoaderMiddleware()],
                            context_value=ContextMock())

            yield config_file, temp_dir, client, schema

    # Remove the temp_dir
    shutil.rmtree(temp_dir)
    def resolve_remote_labbooks(self, info, sort: str, reverse: bool,
                                **kwargs):
        """Method to return a all RemoteLabbook instances for the logged in user

        This is a remote call, so should be fetched on its own and only when needed. The user must have a valid
        session for data to be returned.

        It is recommended to use large page size (e.g. 50-100). This is due to how the remote server returns all the
        available data at once, so it is more efficient to load a lot of records at a time.

        Args:
            sort(sort_mode): String specifying how labbooks should be sorted
            reverse(bool): Reverse sorting if True

        Supported sorting modes:
            - az: naturally sort
            - created_on: sort by creation date, newest first
            - modified_on: sort by modification date, newest first

        Returns:
            list(Labbook)

        Raises:
            ValueError: If no Authorization header is present, or the default
                remote has no admin_service configured.
        """
        # Load config data
        configuration = Configuration().config

        # Extract valid Bearer token from the WSGI environ, if present
        token = None
        if hasattr(info.context.headers, 'environ'):
            if "HTTP_AUTHORIZATION" in info.context.headers.environ:
                token = parse_token(
                    info.context.headers.environ["HTTP_AUTHORIZATION"])
        if not token:
            raise ValueError(
                "Authorization header not provided. Cannot list remote LabBooks."
            )

        # Get remote server configuration. The remotes section is keyed by
        # remote name, so look the default remote up directly instead of
        # scanning all keys.
        default_remote = configuration['git']['default_remote']
        admin_service = configuration['git']['remotes'].get(
            default_remote, {}).get('admin_service')

        if not admin_service:
            raise ValueError('admin_service could not be found')

        # Query backend for data
        mgr = GitLabManager(default_remote, admin_service, access_token=token)
        edges = mgr.list_labbooks(sort_mode=sort, reverse=reverse)
        # One opaque base64 cursor per record (index-based)
        cursors = [
            base64.b64encode("{}".format(cnt).encode("UTF-8")).decode("UTF-8")
            for cnt, x in enumerate(edges)
        ]

        # Process slicing and cursor args
        lbc = ListBasedConnection(edges, cursors, kwargs)
        lbc.apply()

        # Get Labbook instances
        edge_objs = []
        for edge, cursor in zip(lbc.edges, lbc.cursors):
            create_data = {
                "id": "{}&{}".format(edge["namespace"], edge["labbook_name"]),
                "name": edge["labbook_name"],
                "owner": edge["namespace"],
                "description": edge["description"],
                "creation_date_utc": edge["created_on"],
                "modified_date_utc": edge["modified_on"]
            }

            edge_objs.append(
                RemoteLabbookConnection.Edge(node=RemoteLabbook(**create_data),
                                             cursor=cursor))

        return RemoteLabbookConnection(edges=edge_objs,
                                       page_info=lbc.page_info)
Beispiel #23
0
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from flask import Flask
from flask import Blueprint
from flask_graphql import GraphQLView
import graphene

from lmsrvlabbook.api import LabbookQuery, LabbookMutations

from lmcommon.configuration import Configuration

# Load config data for the LabManager instance
config = Configuration()

# Create Blueprint for the labbook GraphQL service
labbook_service = Blueprint('labbook_service', __name__)

# Mount the GraphQL endpoint at /labbook/; the GraphiQL browser UI is only
# enabled when the flask DEBUG config flag is set.
labbook_service.add_url_rule('/labbook/',
                             view_func=GraphQLView.as_view(
                                 'graphql',
                                 schema=graphene.Schema(
                                     query=LabbookQuery,
                                     mutation=LabbookMutations),
                                 graphiql=config.config["flask"]["DEBUG"]))

# If running blueprint script directly, spin a dev server
if __name__ == '__main__':
def build_image_for_jupyterlab():
    """Pytest fixture: build a labbook Docker image for JupyterLab tests.

    Creates a temp working dir with a local user identity, indexes the
    environment repositories, builds a labbook with a base component and a
    pip package, assembles its Dockerfile and builds the image.

    Yields:
        tuple: (lb, ib, docker_client, docker_image_id, client, username)

    Teardown removes the labbook directory and best-effort removes the
    container and image.
    """
    # Create temp dir
    config_file, temp_dir = _create_temp_work_dir()

    # Create user identity
    user_dir = os.path.join(temp_dir, '.labmanager', 'identity')
    os.makedirs(user_dir)
    with open(os.path.join(user_dir, 'user.json'), 'wt') as user_file:
        json.dump(
            {
                "username": "******",
                "email": "*****@*****.**",
                "given_name": "unittester",
                "family_name": "tester"
            }, user_file)

    # Create test client
    schema = graphene.Schema(query=LabbookQuery, mutation=LabbookMutations)

    # get environment data and index
    erm = RepositoryManager(config_file)
    erm.update_repositories()
    erm.index_repositories()

    with patch.object(Configuration, 'find_default_config',
                      lambda self: config_file):
        # Load User identity into app context
        app = Flask("lmsrvlabbook")
        app.config["LABMGR_CONFIG"] = Configuration()
        app.config["LABMGR_ID_MGR"] = get_identity_manager(Configuration())

        with app.app_context():
            # within this block, current_app points to app. Set current user explicitly (this is done in the middleware)
            flask.g.user_obj = app.config["LABMGR_ID_MGR"].get_user_profile()

            # Create a test client
            client = Client(
                schema,
                middleware=[LabBookLoaderMiddleware(), error_middleware],
                context_value=ContextMock())

            # Create a labook
            lb = LabBook(config_file)
            lb.new(name="containerunittestbook",
                   description="Testing docker building.",
                   owner={"username": "******"})

            # Create Component Manager
            cm = ComponentManager(lb)
            # Add a component
            cm.add_component("base", ENV_UNIT_TEST_REPO, ENV_UNIT_TEST_BASE,
                             ENV_UNIT_TEST_REV)
            cm.add_packages("pip3", [{
                "manager": "pip3",
                "package": "requests",
                "version": "2.18.4"
            }])

            ib = ImageBuilder(lb)
            ib.assemble_dockerfile(write=True)
            docker_client = get_docker_client()

            try:
                lb, docker_image_id = ContainerOperations.build_image(
                    labbook=lb, username="******")

                yield lb, ib, docker_client, docker_image_id, client, "unittester"

            finally:
                shutil.rmtree(lb.root_dir)
                # Best-effort cleanup: the container/image may not exist.
                # Catch Exception (not bare except) so KeyboardInterrupt and
                # SystemExit still propagate during teardown.
                try:
                    docker_client.containers.get(docker_image_id).stop()
                    docker_client.containers.get(docker_image_id).remove()
                except Exception:
                    pass

                try:
                    docker_client.images.remove(docker_image_id,
                                                force=True,
                                                noprune=False)
                except Exception:
                    pass
from lmcommon.logging import LMLogger
from lmcommon.environment import RepositoryManager
from lmcommon.auth.identity import AuthenticationError, get_identity_manager
from lmcommon.labbook.lock import reset_all_locks
from lmcommon.labbook import LabBook
from lmsrvcore.auth.user import get_logged_in_author

# Module-level logger for this service
logger = LMLogger.get_logger()

# Create Flask app
app = Flask("lmsrvlabbook")

# Load configuration class into the flask application.
# SECRET_KEY is random per process, so sessions do not survive a restart.
random_bytes = os.urandom(32)
app.config["SECRET_KEY"] = base64.b64encode(random_bytes).decode('utf-8')
app.config["LABMGR_CONFIG"] = config = Configuration()
app.config["LABMGR_ID_MGR"] = get_identity_manager(Configuration())

if config.config["flask"]["allow_cors"]:
    # Allow CORS (preflight responses cached for 2 hours)
    CORS(app, max_age=7200)

# Set Debug mode from the LabManager config
app.config['DEBUG'] = config.config["flask"]["DEBUG"]

# Register LabBook service blueprint (GraphQL endpoint)
app.register_blueprint(blueprint.complete_labbook_service)

# Configure CHP
try:
    api_prefix = app.config["LABMGR_CONFIG"].config['proxy'][