# Example #1
def test(yaml_file_path):
    """Mount NFS-Ganesha, create dirs/files over the NFS mount, move them
    on the mount, then verify the moved keys are visible via S3.

    :param yaml_file_path: path to the rgw_user_info yaml config
    :return: dict with 'move' and 'key' boolean verification status
    """
    ganesha_test_config = {
        "mount_point": "ganesha-mount",
        "rgw_user_info": yaml_file_path,
    }

    log.info("ganesha_test_config :%s\n" % ganesha_test_config)

    io_config = {
        "base_dir_count": 2,
        "sub_dir_count": 2,
        "Files": {"files_in_dir": 2, "size": 10},
    }

    add_io_info = AddIOInfo()
    add_io_info.initialize()

    log.info("io_config: %s\n" % io_config)
    log.info("initiating nfs ganesha")

    nfs_ganesha = PrepNFSGanesha(
        mount_point=ganesha_test_config["mount_point"],
        yaml_fname=ganesha_test_config["rgw_user_info"],
    )
    nfs_ganesha.initialize()

    log.info("authenticating rgw user")
    rgw_auth = Authenticate(
        user_id=nfs_ganesha.user_id,
        access_key=nfs_ganesha.access_key,
        secret_key=nfs_ganesha.secret_key,
    )
    auth = rgw_auth.do_auth()

    log.info("begin IO")
    bdir = BaseDir(
        int(io_config["base_dir_count"]),
        rgw_auth.json_file_upload,
        ganesha_test_config["mount_point"],
        auth["conn"],
    )
    bdirs = bdir.create(uname=str(rgw_auth.user_id))

    subdir = SubdirAndObjects(bdirs, io_config, rgw_auth.json_file_upload, auth["conn"])
    subdir.create()

    op_status = subdir.operation_on_nfs(
        ganesha_test_config["mount_point"], op_code="move"
    )

    verification = {"key": True, "move": True}

    # BUG FIX: check all move ops first, then verify exactly once. The
    # original re-ran the whole S3 verification for every op entry and had
    # no break on a key failure, so a later existing key could overwrite an
    # earlier failure.
    for ops in op_status:
        if not ops["op_code_status"]:
            verification["move"] = False
            break

    if verification["move"]:
        log.info("verification starts")
        log.info("key verifcation starts on s3")
        kstatus = subdir.verify_s3()
        log.info("key verificaion complete: \n%s" % kstatus)

        for ks in kstatus:
            if ks["type"] == "file" and not ks["exists"]:
                verification["key"] = False
                break

    return verification
# Example #2
def test(yaml_file_path):
    """Mount NFS-Ganesha, create base dirs, sub dirs and files over the NFS
    mount, then verify the buckets and keys through the S3 interface.

    :param yaml_file_path: path to the rgw_user_info yaml config
    :return: dict with 'bucket' and 'key' boolean verification status
    """
    ganesha_test_config = {
        'mount_point': 'ganesha-mount',
        'rgw_user_info': yaml_file_path
    }

    log.info('ganesha_test_config :%s\n' % ganesha_test_config)

    io_config = {
        'base_dir_count': 2,
        'sub_dir_count': 2,
        'Files': {
            'files_in_dir': 2,
            'size': 10
        }
    }

    add_io_info = AddIOInfo()
    add_io_info.initialize()

    log.info('io_config: %s\n' % io_config)
    log.info('initiating nfs ganesha')

    nfs_ganesha = PrepNFSGanesha(
        mount_point=ganesha_test_config['mount_point'],
        yaml_fname=ganesha_test_config['rgw_user_info'])
    nfs_ganesha.initialize()

    log.info('authenticating rgw user')
    rgw_auth = Authenticate(user_id=nfs_ganesha.user_id,
                            access_key=nfs_ganesha.access_key,
                            secret_key=nfs_ganesha.secret_key)
    auth = rgw_auth.do_auth()

    log.info('begin IO')
    bdir = BaseDir(int(io_config['base_dir_count']), rgw_auth.json_file_upload,
                   ganesha_test_config['mount_point'], auth['conn'])
    bdirs = bdir.create(uname=str(rgw_auth.user_id))

    subdir = SubdirAndObjects(bdirs, io_config, rgw_auth.json_file_upload,
                              auth['conn'])
    subdir.create()

    log.info('verification starts')
    time.sleep(15)  # allow ganesha/rgw to sync before verifying

    bstatus = bdir.verify_s3()
    log.info('bucket verification complete')

    kstatus = subdir.verify_s3()
    log.info('key verification complete')

    verification = {}

    # BUG FIX: fail fast on the first bad entry. The original had no break
    # after a missing key, so a later good (non-file) entry could reset
    # verification['key'] back to True and mask the failure. Empty status
    # lists now yield False instead of a missing dict key.
    verification['bucket'] = bool(bstatus)
    for bs in bstatus:
        if not bs['exists']:
            verification['bucket'] = False
            break

    verification['key'] = bool(kstatus)
    for ks in kstatus:
        if not ks['exists']:
            verification['key'] = False
            break
        if ks['type'] == 'file':
            if not ks['md5_matched']:
                verification['key'] = False
                break
            if not ks['size_matched']:
                verification['key'] = False
                break

    return verification
def test(yaml_file_path):
    """Reset the rgw user config so a new user is created, mount NFS-Ganesha
    with the new config, create dirs/files over NFS and verify via S3.

    :param yaml_file_path: path to the rgw_user_info yaml config
    :return: dict with 'bucket' and 'key' boolean verification status
    """
    ganesha_test_config = {
        "mount_point": "ganesha-mount",
        "rgw_user_info": yaml_file_path,
    }

    verification = {"bucket": False, "key": False}

    log.info("ganesha_test_config :%s\n" % ganesha_test_config)

    io_config = {
        "base_dir_count": 2,
        "sub_dir_count": 2,
        "Files": {"files_in_dir": 2, "size": 10},
    }

    add_io_info = AddIOInfo()
    add_io_info.initialize()

    log.info("io_config: %s\n" % io_config)
    log.info("initiating nfs ganesha")
    log.info(
        "resetting rgw_user_info yaml file with null values to that new rgw user will be created and with new_config"
    )

    rgw_user_config_ops = RGWUserConfigOps(
        yaml_fname=ganesha_test_config["rgw_user_info"]
    )
    rgw_user_config_ops.update_config()

    log.info("will take new config and start the basic IO test")
    log.info("--------------------------------------------------")

    nfs_ganesha = PrepNFSGanesha(
        mount_point=ganesha_test_config["mount_point"],
        yaml_fname=ganesha_test_config["rgw_user_info"],
    )
    nfs_ganesha.initialize()

    log.info("authenticating rgw user")
    rgw_auth = Authenticate(
        user_id=nfs_ganesha.user_id,
        access_key=nfs_ganesha.access_key,
        secret_key=nfs_ganesha.secret_key,
    )
    auth = rgw_auth.do_auth()

    log.info("begin IO")
    bdir = BaseDir(
        int(io_config["base_dir_count"]),
        rgw_auth.json_file_upload,
        ganesha_test_config["mount_point"],
        auth["conn"],
    )
    bdirs = bdir.create(uname=str(rgw_auth.user_id))

    subdir = SubdirAndObjects(bdirs, io_config, rgw_auth.json_file_upload, auth["conn"])
    subdir.create()

    log.info("verification starts")
    time.sleep(15)  # allow ganesha/rgw to sync before verifying

    bstatus = bdir.verify_s3()
    log.info("bucket verification complete")

    kstatus = subdir.verify_s3()
    log.info("key verification complete")

    # BUG FIX: fail fast on the first bad entry. The original had no break
    # after a missing key, so a later good (non-file) entry could reset
    # verification["key"] back to True and mask the failure.
    verification["bucket"] = bool(bstatus)
    for bs in bstatus:
        if not bs["exists"]:
            verification["bucket"] = False
            break

    verification["key"] = bool(kstatus)
    for ks in kstatus:
        if not ks["exists"]:
            verification["key"] = False
            break
        if ks["type"] == "file":
            if not ks["md5_matched"]:
                verification["key"] = False
                break
            if not ks["size_matched"]:
                verification["key"] = False
                break

    return verification
# Example #4
def test(yaml_file_path):
    """Upload objects via S3, move them through the S3 interface, then
    verify the moved keys are visible on the NFS-Ganesha mount.

    :param yaml_file_path: path to the rgw_user_info yaml config
    :return: dict with 'delete' (move-op status) and 'key' verification
    """
    ganesha_test_config = {
        "mount_point": "ganesha-mount",
        "rgw_user_info": yaml_file_path,
    }

    log.info("ganesha_test_config :%s\n" % ganesha_test_config)

    add_io_info = AddIOInfo()
    add_io_info.initialize()

    log.info("initiating nfs ganesha")

    nfs_ganesha = PrepNFSGanesha(
        mount_point=ganesha_test_config["mount_point"],
        yaml_fname=ganesha_test_config["rgw_user_info"],
    )
    nfs_ganesha.initialize()

    config = Config()
    config.bucket_count = 1
    config.objects_count = 2
    config.objects_size_range = {"min": 10, "max": 50}

    log.info("begin IO")

    rgw_user = nfs_ganesha.read_config()
    rgw = ObjectOps(config, rgw_user)

    buckets = rgw.create_bucket()
    rgw.upload(buckets)

    time.sleep(20)  # allow uploads to land before operating on them

    # NOTE(review): bdir is unused below; kept in case BaseDir() has
    # mount-side effects — confirm and drop if it does not.
    bdir = BaseDir(
        count=None,
        json_fname=rgw.json_file_upload,
        mount_point=ganesha_test_config["mount_point"],
        auth=rgw.connection["conn"],
    )

    subd = SubdirAndObjects(
        base_dir_list=None,
        config=None,
        json_fname=rgw.json_file_upload,
        auth=rgw.connection["conn"],
    )

    ks_op_status = subd.operation_on_s3(op_code="move")

    time.sleep(300)  # wait for 5 mins so the change propagates to nfs

    # after move, verify on nfs for the changes
    verification = {"delete": True, "key": True}

    for status in ks_op_status:
        if not status["op_code_status"]:
            verification["delete"] = False
            break

    if verification["delete"]:
        log.info("verification starts")
        log.info("key verificaion starts")
        kstatus = subd.verify_nfs(mount_point=ganesha_test_config["mount_point"])
        log.info("key verification complete: %s" % kstatus)

        # BUG FIX: fail fast on the first missing key; the original let the
        # last entry win, so a later existing key masked an earlier failure.
        for ks in kstatus:
            if not ks["exists"]:
                verification["key"] = False
                break

    return verification
def test(yaml_file_path):
    """Upload objects via S3, delete them through the S3 interface, then
    verify the deleted keys are gone from the NFS-Ganesha mount.

    :param yaml_file_path: path to the rgw_user_info yaml config
    :return: dict with 'delete' and 'key' boolean verification status
    """
    ganesha_test_config = {'mount_point': 'ganesha-mount',
                           'rgw_user_info': yaml_file_path}

    log.info('ganesha_test_config :%s\n' % ganesha_test_config)
    log.info('initiating nfs ganesha')

    add_io_info = AddIOInfo()
    add_io_info.initialize()

    nfs_ganesha = PrepNFSGanesha(mount_point=ganesha_test_config['mount_point'],
                                 yaml_fname=ganesha_test_config['rgw_user_info'])
    nfs_ganesha.initialize()

    config = Config()
    config.bucket_count = 2
    config.objects_count = 2
    config.objects_size_range = {'min': 10, 'max': 50}

    log.info('begin IO')

    rgw_user = nfs_ganesha.read_config()
    rgw = ObjectOps(config, rgw_user)

    buckets = rgw.create_bucket()
    rgw.upload(buckets)

    time.sleep(20)  # allow uploads to land before operating on them

    # NOTE(review): bdir is unused below; kept in case BaseDir() has
    # mount-side effects — confirm and drop if it does not.
    bdir = BaseDir(count=None, json_fname=rgw.json_file_upload,
                   mount_point=ganesha_test_config['mount_point'],
                   auth=rgw.connection['conn'])

    subd = SubdirAndObjects(base_dir_list=None, config=None,
                            json_fname=rgw.json_file_upload,
                            auth=rgw.connection['conn'])

    ks_op_status = subd.operation_on_s3(op_code='delete')

    verification = {'delete': True, 'key': True}

    for status in ks_op_status:
        if not status['op_code_status']:
            verification['delete'] = False
            break

    if verification['delete']:
        log.info('verification starts')
        log.info('key verificaion starts')
        kstatus = subd.verify_nfs(mount_point=ganesha_test_config['mount_point'])
        log.info('key verification complete: %s' % kstatus)

        # BUG FIX: a key that still exists after delete is a failure; break
        # so a later deleted key cannot overwrite the failure (the original
        # let the last entry win).
        for ks in kstatus:
            if ks['exists']:
                verification['key'] = False
                break

    return verification
def test(yaml_file_path):
    """Create dirs/files over the NFS-Ganesha mount, move them on the
    mount, then verify the moved keys are visible via S3.

    :param yaml_file_path: path to the rgw_user_info yaml config
    :return: dict with 'move' and 'key' boolean verification status
    """
    ganesha_test_config = {
        'mount_point': 'ganesha-mount',
        'rgw_user_info': yaml_file_path
    }

    log.info('ganesha_test_config :%s\n' % ganesha_test_config)

    io_config = {
        'base_dir_count': 2,
        'sub_dir_count': 2,
        'Files': {
            'files_in_dir': 2,
            'size': 10
        }
    }

    add_io_info = AddIOInfo()
    add_io_info.initialize()

    log.info('io_config: %s\n' % io_config)
    log.info('initiating nfs ganesha')

    nfs_ganesha = PrepNFSGanesha(
        mount_point=ganesha_test_config['mount_point'],
        yaml_fname=ganesha_test_config['rgw_user_info'])
    nfs_ganesha.initialize()

    log.info('authenticating rgw user')
    rgw_auth = Authenticate(user_id=nfs_ganesha.user_id,
                            access_key=nfs_ganesha.access_key,
                            secret_key=nfs_ganesha.secret_key)
    auth = rgw_auth.do_auth()

    log.info('begin IO')
    bdir = BaseDir(int(io_config['base_dir_count']), rgw_auth.json_file_upload,
                   ganesha_test_config['mount_point'], auth['conn'])
    bdirs = bdir.create(uname=str(rgw_auth.user_id))

    subdir = SubdirAndObjects(bdirs, io_config, rgw_auth.json_file_upload,
                              auth['conn'])
    subdir.create()

    op_status = subdir.operation_on_nfs(ganesha_test_config['mount_point'],
                                        op_code='move')

    verification = {'key': True, 'move': True}

    # BUG FIX: check all move ops first, then verify exactly once. The
    # original re-ran the whole S3 verification for every op entry and had
    # no break on a key failure, so a later existing key could overwrite an
    # earlier failure.
    for ops in op_status:
        if not ops['op_code_status']:
            verification['move'] = False
            break

    if verification['move']:
        log.info('verification starts')
        log.info('key verifcation starts on s3')
        kstatus = subdir.verify_s3()
        log.info('key verificaion complete: \n%s' % kstatus)

        for ks in kstatus:
            if ks['type'] == 'file' and not ks['exists']:
                verification['key'] = False
                break

    return verification
def test(yaml_file_path):
    """Create text files over the NFS-Ganesha mount, edit them in place
    on the mount, then verify the edited keys through the S3 interface.

    :param yaml_file_path: path to the rgw_user_info yaml config
    :return: key verification status list from verify_s3
    """
    ganesha_test_config = {
        'mount_point': 'ganesha-mount',
        'rgw_user_info': yaml_file_path
    }
    log.info('ganesha_test_config :%s\n' % ganesha_test_config)

    io_config = {
        'base_dir_count': 1,
        'sub_dir_count': 1,
        'Files': {
            'files_in_dir': 1,
            'size': 10
        }
    }

    io_tracker = AddIOInfo()
    io_tracker.initialize()

    log.info('io_config: %s\n' % io_config)
    log.info('initiating nfs ganesha')

    ganesha = PrepNFSGanesha(
        mount_point=ganesha_test_config['mount_point'],
        yaml_fname=ganesha_test_config['rgw_user_info'])
    ganesha.initialize()

    log.info('authenticating rgw user')
    authenticator = Authenticate(user_id=ganesha.user_id,
                                 access_key=ganesha.access_key,
                                 secret_key=ganesha.secret_key)
    auth = authenticator.do_auth()

    log.info('begin IO')
    base_dir = BaseDir(int(io_config['base_dir_count']),
                       authenticator.json_file_upload,
                       ganesha_test_config['mount_point'], auth['conn'])
    created_dirs = base_dir.create(uname=str(authenticator.user_id))

    sub_objects = SubdirAndObjects(created_dirs, io_config,
                                   authenticator.json_file_upload,
                                   auth['conn'])
    sub_objects.create(file_type='text')

    log.info('operation starting: %s' % 'edit')
    op_status = sub_objects.operation_on_nfs(
        ganesha_test_config['mount_point'], op_code='edit')

    # abort the run on the first failed edit operation
    if any(not op['op_code_status'] for op in op_status):
        log.error('operation failed')
        exit(1)

    log.info('verification starts')
    kstatus = sub_objects.verify_s3(op_type='edit')
    log.info('key verificaion complete: \n%s' % kstatus)

    return kstatus
def test(yaml_file_path):
    """Upload objects via S3 into several buckets, then verify the buckets
    and keys are present and intact on the NFS-Ganesha mount.

    :param yaml_file_path: path to the rgw_user_info yaml config
    :return: dict with 'bucket' and 'key' boolean verification status
    """
    ganesha_test_config = {
        "mount_point": "ganesha-mount",
        "rgw_user_info": yaml_file_path,
    }

    log.info("ganesha_test_config :%s\n" % ganesha_test_config)
    log.info("initiating nfs ganesha")

    add_io_info = AddIOInfo()
    add_io_info.initialize()

    nfs_ganesha = PrepNFSGanesha(
        mount_point=ganesha_test_config["mount_point"],
        yaml_fname=ganesha_test_config["rgw_user_info"],
    )
    nfs_ganesha.initialize()

    config = Config()
    config.bucket_count = 5
    config.objects_count = 2
    config.objects_size_range = {"min": 10, "max": 50}

    log.info("begin IO")

    rgw_user = nfs_ganesha.read_config()
    rgw = ObjectOps(config, rgw_user)

    buckets = rgw.create_bucket()
    rgw.upload(buckets)

    time.sleep(20)  # allow uploads to land before checking the mount

    bdir = BaseDir(
        count=None,
        json_fname=rgw.json_file_upload,
        mount_point=ganesha_test_config["mount_point"],
        auth=rgw.connection,
    )

    subd = SubdirAndObjects(
        base_dir_list=None,
        config=None,
        json_fname=rgw.json_file_upload,
        auth=rgw.connection,
    )

    time.sleep(15)

    log.info("verification starts")

    log.info("bucket verification starts")
    bstatus = bdir.verify_nfs()
    log.info("bucket verification complete:%s" % bstatus)

    log.info("key verificaion starts")
    kstatus = subd.verify_nfs(mount_point=ganesha_test_config["mount_point"])
    log.info("key verification complete: %s" % kstatus)

    verification = {"bucket": bool(bstatus), "key": True}

    # BUG FIX: break on the first missing key; the original fell through to
    # the md5/size checks for a key that does not even exist.
    for ks in kstatus:
        if not ks["exists"]:
            verification["key"] = False
            break
        if not ks["md5_matched"]:
            verification["key"] = False
            break
        if not ks["size_matched"]:
            verification["key"] = False
            break

    return verification
def test(yaml_file_path):
    """Create text files over the NFS-Ganesha mount, edit them on the
    mount, then verify the edited keys through the S3 interface.

    :param yaml_file_path: path to the rgw_user_info yaml config
    :return: key verification status list from verify_s3
    """
    ganesha_test_config = {
        "mount_point": "ganesha-mount",
        "rgw_user_info": yaml_file_path,
    }
    log.info("ganesha_test_config :%s\n" % ganesha_test_config)

    io_config = {
        "base_dir_count": 1,
        "sub_dir_count": 1,
        "Files": {"files_in_dir": 1, "size": 10},
    }

    io_tracker = AddIOInfo()
    io_tracker.initialize()

    log.info("io_config: %s\n" % io_config)
    log.info("initiating nfs ganesha")

    ganesha = PrepNFSGanesha(
        mount_point=ganesha_test_config["mount_point"],
        yaml_fname=ganesha_test_config["rgw_user_info"],
    )
    ganesha.initialize()

    log.info("authenticating rgw user")
    authenticator = Authenticate(
        user_id=ganesha.user_id,
        access_key=ganesha.access_key,
        secret_key=ganesha.secret_key,
    )
    auth = authenticator.do_auth()

    log.info("begin IO")
    base_dir = BaseDir(
        int(io_config["base_dir_count"]),
        authenticator.json_file_upload,
        ganesha_test_config["mount_point"],
        auth["conn"],
    )
    created_dirs = base_dir.create(uname=str(authenticator.user_id))

    sub_objects = SubdirAndObjects(
        created_dirs, io_config, authenticator.json_file_upload, auth["conn"]
    )
    sub_objects.create(file_type="text")

    log.info("operation starting: %s" % "edit")
    op_status = sub_objects.operation_on_nfs(
        ganesha_test_config["mount_point"], op_code="edit"
    )

    # abort the run on the first failed edit operation
    if any(not op["op_code_status"] for op in op_status):
        log.error("operation failed")
        exit(1)

    log.info("verification starts")
    kstatus = sub_objects.verify_s3(op_type="edit")
    log.info("key verificaion complete: \n%s" % kstatus)

    return kstatus
def test(yaml_file_path):
    """Edit objects through the S3 interface and verify the edited
    content on the NFS-Ganesha mount.

    :param yaml_file_path: path to the rgw_user_info yaml config
    :return: key verification status from verify_nfs
    """
    ganesha_test_config = {
        'mount_point': 'ganesha-mount',
        'rgw_user_info': yaml_file_path
    }
    log.info('ganesha_test_config :%s\n' % ganesha_test_config)
    log.info('initiating nfs ganesha')

    io_tracker = AddIOInfo()
    io_tracker.initialize()

    ganesha = PrepNFSGanesha(
        mount_point=ganesha_test_config['mount_point'],
        yaml_fname=ganesha_test_config['rgw_user_info'])
    ganesha.initialize()

    config = Config()
    config.bucket_count = 1
    config.objects_count = 1
    config.objects_size_range = {'min': 10, 'max': 50}

    log.info('begin IO')

    rgw = ObjectOps(config, ganesha.read_config())

    rgw.upload(rgw.create_bucket())

    time.sleep(20)

    base_dir = BaseDir(count=None,
                       json_fname=rgw.json_file_upload,
                       mount_point=ganesha_test_config['mount_point'],
                       auth=rgw.connection['conn'])

    sub_objects = SubdirAndObjects(base_dir_list=None,
                                   config=None,
                                   json_fname=rgw.json_file_upload,
                                   auth=rgw.connection['conn'],
                                   download_json_fname=rgw.json_file_download)

    op_status = sub_objects.operation_on_s3(op_code='edit')

    # abort the run on the first failed edit operation
    if any(not op['op_code_status'] for op in op_status):
        log.error('operation failed')
        exit(1)

    log.info('verification starts')
    log.info('key verificaion starts')
    kstatus = sub_objects.verify_nfs(
        mount_point=ganesha_test_config['mount_point'], op_type='edit')
    log.info('key verification complete: %s' % kstatus)

    return kstatus
def test(yaml_file_path):
    """Edit objects through the S3 interface and verify the edited
    content on the NFS-Ganesha mount.

    :param yaml_file_path: path to the rgw_user_info yaml config
    :return: key verification status from verify_nfs
    """
    ganesha_test_config = {
        "mount_point": "ganesha-mount",
        "rgw_user_info": yaml_file_path,
    }
    log.info("ganesha_test_config :%s\n" % ganesha_test_config)
    log.info("initiating nfs ganesha")

    io_tracker = AddIOInfo()
    io_tracker.initialize()

    ganesha = PrepNFSGanesha(
        mount_point=ganesha_test_config["mount_point"],
        yaml_fname=ganesha_test_config["rgw_user_info"],
    )
    ganesha.initialize()

    config = Config()
    config.bucket_count = 1
    config.objects_count = 1
    config.objects_size_range = {"min": 10, "max": 50}

    log.info("begin IO")

    rgw = ObjectOps(config, ganesha.read_config())

    rgw.upload(rgw.create_bucket())

    time.sleep(20)

    base_dir = BaseDir(
        count=None,
        json_fname=rgw.json_file_upload,
        mount_point=ganesha_test_config["mount_point"],
        auth=rgw.connection["conn"],
    )

    sub_objects = SubdirAndObjects(
        base_dir_list=None,
        config=None,
        json_fname=rgw.json_file_upload,
        auth=rgw.connection["conn"],
        download_json_fname=rgw.json_file_download,
    )

    op_status = sub_objects.operation_on_s3(op_code="edit")

    # abort the run on the first failed edit operation
    if any(not op["op_code_status"] for op in op_status):
        log.error("operation failed")
        exit(1)

    log.info("verification starts")
    log.info("key verificaion starts")
    kstatus = sub_objects.verify_nfs(
        mount_point=ganesha_test_config["mount_point"], op_type="edit"
    )
    log.info("key verification complete: %s" % kstatus)

    return kstatus
def test(yaml_file_path):
    """Create dirs/files over NFS in a background thread while killing the
    radosgw process mid-IO, then verify buckets and keys via S3 once IO
    completes.

    :param yaml_file_path: path to the rgw_user_info yaml config
    :return: dict with 'bucket' and 'key' boolean verification status
    """
    ganesha_test_config = {
        "mount_point": "ganesha-mount",
        "rgw_user_info": yaml_file_path,
    }

    verification = {"bucket": False, "key": False}

    log.info("ganesha_test_config :%s\n" % ganesha_test_config)

    io_config = {
        "base_dir_count": 2,
        "sub_dir_count": 2,
        "Files": {"files_in_dir": 2, "size": 10},
    }

    add_io_info = AddIOInfo()
    add_io_info.initialize()

    log.info("io_config: %s\n" % io_config)
    log.info("initiating nfs ganesha")

    nfs_ganesha = PrepNFSGanesha(
        mount_point=ganesha_test_config["mount_point"],
        yaml_fname=ganesha_test_config["rgw_user_info"],
    )
    nfs_ganesha.initialize()

    log.info("authenticating rgw user")
    rgw_auth = Authenticate(
        user_id=nfs_ganesha.user_id,
        access_key=nfs_ganesha.access_key,
        secret_key=nfs_ganesha.secret_key,
    )
    auth = rgw_auth.do_auth()

    log.info("begin IO")
    bdir = BaseDir(
        int(io_config["base_dir_count"]),
        rgw_auth.json_file_upload,
        ganesha_test_config["mount_point"],
        auth["conn"],
    )
    bdirs = bdir.create(uname=str(rgw_auth.user_id))

    subdir = SubdirAndObjects(bdirs, io_config, rgw_auth.json_file_upload, auth["conn"])

    # run file creation in the background so rgw can be killed mid-IO
    sub_dir_creation = threading.Thread(target=subdir.create)
    sub_dir_creation.start()

    # kill RGW process
    log.info("killing rgw process")

    p = Process(name="radosgw")
    p.find()

    if p.process is None:
        log.info("process not running")
    else:
        log.info("killing the process")
        p.process.kill()

    sub_dir_creation.join()

    log.info("verification starts")
    time.sleep(15)  # allow ganesha/rgw to settle before verifying

    bstatus = bdir.verify_s3()
    log.info("bucket verification complete")

    kstatus = subdir.verify_s3()
    log.info("key verification complete")

    # BUG FIX: the original reassigned verification = {} here, discarding
    # the False/False defaults (an empty status list then returned a dict
    # with missing keys), and a later good key could overwrite an earlier
    # failure because the loop had no break after a missing key.
    verification["bucket"] = bool(bstatus)
    for bs in bstatus:
        if not bs["exists"]:
            verification["bucket"] = False
            break

    verification["key"] = bool(kstatus)
    for ks in kstatus:
        if not ks["exists"]:
            verification["key"] = False
            break
        if ks["type"] == "file":
            if not ks["md5_matched"]:
                verification["key"] = False
                break
            if not ks["size_matched"]:
                verification["key"] = False
                break

    return verification