import argparse

import yaml

# Config, below, comes from the surrounding test framework.
parser = argparse.ArgumentParser(description='RGW Automation')

parser.add_argument('-c',
                    dest="config",
                    help='RGW Test yaml configuration')

parser.add_argument('-p',
                    dest="port",
                    default='8080',
                    help='port number where RGW is running')

args = parser.parse_args()

yaml_file = args.config
config = Config()

config.user_count = 2

config.port = args.port
if yaml_file is None:
    config.bucket_count = 2
    config.objects_count = 10
    config.objects_size_range = {'min': 10, 'max': 50}
else:
    with open(yaml_file, 'r') as f:
        doc = yaml.safe_load(f)
    config.bucket_count = doc['config']['bucket_count']
    config.objects_count = doc['config']['objects_count']
    config.objects_size_range = {
        'min': doc['config']['objects_size_range']['min'],
        'max': doc['config']['objects_size_range']['max']}
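
# For reference, a sketch of the YAML layout the block above expects; the
# keys mirror the doc['config'][...] lookups and the values are illustrative:
sample_doc = {
    'config': {
        'bucket_count': 2,
        'objects_count': 10,
        'objects_size_range': {'min': 10, 'max': 50},
    }
}
print(yaml.safe_dump(sample_doc, default_flow_style=False))
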
def test(yaml_file_path):

    ganesha_test_config = {'mount_point': 'ganesha-mount',
                           'rgw_user_info': yaml_file_path}

    log.info('ganesha_test_config :%s\n' % ganesha_test_config)

    log.info('initiating nfs ganesha')

    add_io_info = AddIOInfo()
    add_io_info.initialize()

    nfs_ganesha = PrepNFSGanesha(mount_point=ganesha_test_config['mount_point'],
                                 yaml_fname=ganesha_test_config['rgw_user_info'])
    nfs_ganesha.initialize()

    config = Config()
    config.bucket_count = 2
    config.objects_count = 2
    config.objects_size_range = {'min': 10, 'max': 50}

    log.info('begin IO')

    rgw_user = nfs_ganesha.read_config()

    rgw = ObjectOps(config, rgw_user)

    buckets = rgw.create_bucket()
    rgw.upload(buckets)

    time.sleep(20)

    bdir = BaseDir(count=None,
                   json_fname=rgw.json_file_upload,
                   mount_point=ganesha_test_config['mount_point'],
                   auth=rgw.connection['conn'])

    subd = SubdirAndObjects(base_dir_list=None,
                            config=None,
                            json_fname=rgw.json_file_upload,
                            auth=rgw.connection['conn'])

    ks_op_status = subd.operation_on_s3(op_code='delete')

    verification = {'delete': True,
                    'key': True}

    for status in ks_op_status:

        if not status['op_code_status']:
            verification['delete'] = False
            break

    if verification['delete']:

        log.info('verification starts')

        log.info('key verification starts')
        kstatus = subd.verify_nfs(mount_point=ganesha_test_config['mount_point'])
        log.info('key verification complete: %s' % kstatus)

        for ks in kstatus:
            if ks['exists']:
                # a deleted key still present on NFS fails the verification
                verification['key'] = False
                break

    return verification
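
# The key check in the delete case reduces to one predicate; a minimal
# sketch (the helper name is illustrative, not part of the suite):
def all_deleted(kstatus):
    # every entry must report the key gone from the NFS mount
    return all(not ks['exists'] for ks in kstatus)
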
def test(yaml_file_path):

    ganesha_test_config = {
        "mount_point": "ganesha-mount",
        "rgw_user_info": yaml_file_path,
    }

    log.info("ganesha_test_config :%s\n" % ganesha_test_config)

    add_io_info = AddIOInfo()
    add_io_info.initialize()

    log.info("initiating nfs ganesha")

    nfs_ganesha = PrepNFSGanesha(
        mount_point=ganesha_test_config["mount_point"],
        yaml_fname=ganesha_test_config["rgw_user_info"],
    )
    nfs_ganesha.initialize()

    config = Config()
    config.bucket_count = 1
    config.objects_count = 2
    config.objects_size_range = {"min": 10, "max": 50}

    log.info("begin IO")

    rgw_user = nfs_ganesha.read_config()

    rgw = ObjectOps(config, rgw_user)

    buckets = rgw.create_bucket()
    rgw.upload(buckets)

    time.sleep(20)

    bdir = BaseDir(
        count=None,
        json_fname=rgw.json_file_upload,
        mount_point=ganesha_test_config["mount_point"],
        auth=rgw.connection["conn"],
    )

    subd = SubdirAndObjects(
        base_dir_list=None,
        config=None,
        json_fname=rgw.json_file_upload,
        auth=rgw.connection["conn"],
    )

    ks_op_status = subd.operation_on_s3(op_code="move")

    time.sleep(300)  # wait for 5 mins

    # after move, verify on nfs for the changes

    verification = {"delete": True, "key": True}

    for status in ks_op_status:

        if not status["op_code_status"]:
            verification["delete"] = False
            break

    if verification["delete"]:

        log.info("verification starts")

        log.info("key verificaion starts")
        kstatus = subd.verify_nfs(mount_point=ganesha_test_config["mount_point"])
        log.info("key verification complete: %s" % kstatus)

        for ks in kstatus:
            if not ks["exists"]:
                # a moved key missing on NFS fails the verification
                verification["key"] = False
                break

    return verification
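
# The fixed five-minute sleep above can become a bounded poll; a sketch that
# assumes the moved keys map to plain paths under the mount point (in the
# suite itself, verify_nfs wraps the real check):
import os
import time

def wait_for_paths(paths, timeout=300, interval=10):
    # poll until every path appears on the NFS mount or the timeout expires
    deadline = time.time() + timeout
    while time.time() < deadline:
        if all(os.path.exists(p) for p in paths):
            return True
        time.sleep(interval)
    return False
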
def test(yaml_file_path):

    ganesha_test_config = {
        "mount_point": "ganesha-mount",
        "rgw_user_info": yaml_file_path,
    }

    log.info("ganesha_test_config :%s\n" % ganesha_test_config)

    log.info("initiating nfs ganesha")

    add_io_info = AddIOInfo()
    add_io_info.initialize()

    nfs_ganesha = PrepNFSGanesha(
        mount_point=ganesha_test_config["mount_point"],
        yaml_fname=ganesha_test_config["rgw_user_info"],
    )
    nfs_ganesha.initialize()

    config = Config()
    config.bucket_count = 5
    config.objects_count = 2
    config.objects_size_range = {"min": 10, "max": 50}

    log.info("begin IO")

    rgw_user = nfs_ganesha.read_config()

    rgw = ObjectOps(config, rgw_user)

    buckets = rgw.create_bucket()
    rgw.upload(buckets)

    time.sleep(20)

    bdir = BaseDir(
        count=None,
        json_fname=rgw.json_file_upload,
        mount_point=ganesha_test_config["mount_point"],
        auth=rgw.connection,
    )

    subd = SubdirAndObjects(
        base_dir_list=None,
        config=None,
        json_fname=rgw.json_file_upload,
        auth=rgw.connection,
    )

    time.sleep(15)

    log.info("verification starts")

    log.info("bucket verification starts")
    bstatus = bdir.verify_nfs()
    log.info("bucket verification complete:%s" % bstatus)

    log.info("key verificaion starts")
    kstatus = subd.verify_nfs(mount_point=ganesha_test_config["mount_point"])
    log.info("key verification complete: %s" % kstatus)

    verification = {"bucket": True, "key": True}

    verification["bucket"] = bool(bstatus)

    for ks in kstatus:

        if not ks["exists"]:
            verification["key"] = False
            break

        if not ks["md5_matched"]:
            verification["key"] = False
            break

        if not ks["size_matched"]:
            verification["key"] = False
            break

    return verification
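
# The three key checks above collapse into one predicate; a minimal sketch
# (helper name illustrative):
def keys_verified(kstatus):
    # a key passes only if it exists and both its md5 and size match
    return all(ks['exists'] and ks['md5_matched'] and ks['size_matched']
               for ks in kstatus)
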
def test(yaml_file_path):

    ganesha_test_config = {
        'mount_point': 'ganesha-mount',
        'rgw_user_info': yaml_file_path
    }

    log.info('ganesha_test_config :%s\n' % ganesha_test_config)

    log.info('initiating nfs ganesha')

    add_io_info = AddIOInfo()
    add_io_info.initialize()

    nfs_ganesha = PrepNFSGanesha(
        mount_point=ganesha_test_config['mount_point'],
        yaml_fname=ganesha_test_config['rgw_user_info'])
    nfs_ganesha.initialize()

    config = Config()
    config.bucket_count = 1
    config.objects_count = 1
    config.objects_size_range = {'min': 10, 'max': 50}

    log.info('begin IO')

    rgw_user = nfs_ganesha.read_config()

    rgw = ObjectOps(config, rgw_user)

    buckets = rgw.create_bucket()
    rgw.upload(buckets)

    time.sleep(20)

    bdir = BaseDir(count=None,
                   json_fname=rgw.json_file_upload,
                   mount_point=ganesha_test_config['mount_point'],
                   auth=rgw.connection['conn'])

    subd = SubdirAndObjects(base_dir_list=None,
                            config=None,
                            json_fname=rgw.json_file_upload,
                            auth=rgw.connection['conn'],
                            download_json_fname=rgw.json_file_download)

    op_status = subd.operation_on_s3(op_code='edit')

    for op in op_status:

        if not op['op_code_status']:
            log.error('operation failed')
            exit(1)

    log.info('verification starts')

    log.info('key verification starts')
    kstatus = subd.verify_nfs(mount_point=ganesha_test_config['mount_point'],
                              op_type='edit')
    log.info('key verification complete: %s' % kstatus)

    return kstatus
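
# Edit verification ultimately compares file contents; a sketch of the
# checksum side using only the standard library (the helper is illustrative,
# not the suite's own code):
import hashlib

def md5_of(path):
    # stream the file in chunks so large objects are not held in memory
    h = hashlib.md5()
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(8192), b''):
            h.update(chunk)
    return h.hexdigest()
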
def test(yaml_file_path):

    ganesha_test_config = {
        "mount_point": "ganesha-mount",
        "rgw_user_info": yaml_file_path,
    }

    log.info("ganesha_test_config :%s\n" % ganesha_test_config)

    log.info("initiating nfs ganesha")

    add_io_info = AddIOInfo()
    add_io_info.initialize()

    nfs_ganesha = PrepNFSGanesha(
        mount_point=ganesha_test_config["mount_point"],
        yaml_fname=ganesha_test_config["rgw_user_info"],
    )
    nfs_ganesha.initialize()

    config = Config()
    config.bucket_count = 1
    config.objects_count = 1
    config.objects_size_range = {"min": 10, "max": 50}

    log.info("begin IO")

    rgw_user = nfs_ganesha.read_config()

    rgw = ObjectOps(config, rgw_user)

    buckets = rgw.create_bucket()
    rgw.upload(buckets)

    time.sleep(20)

    bdir = BaseDir(
        count=None,
        json_fname=rgw.json_file_upload,
        mount_point=ganesha_test_config["mount_point"],
        auth=rgw.connection["conn"],
    )

    subd = SubdirAndObjects(
        base_dir_list=None,
        config=None,
        json_fname=rgw.json_file_upload,
        auth=rgw.connection["conn"],
        download_json_fname=rgw.json_file_download,
    )

    op_status = subd.operation_on_s3(op_code="edit")

    for op in op_status:

        if not op["op_code_status"]:
            log.error("operation failed")
            exit(1)

    log.info("verification starts")

    log.info("key verificaion starts")
    kstatus = subd.verify_nfs(mount_point=ganesha_test_config["mount_point"],
                              op_type="edit")
    log.info("key verification complete: %s" % kstatus)

    return kstatus
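
# Sketch of how one of the test() variants above is typically driven; this
# mirrors the argparse fragment at the top of the page and assumes test()
# and log are defined in the same module:
if __name__ == '__main__':
    import argparse

    parser = argparse.ArgumentParser(description='RGW Automation')
    parser.add_argument('-c',
                        dest='config',
                        help='RGW Test yaml configuration')
    args = parser.parse_args()

    verification = test(args.config)
    log.info('verification: %s' % verification)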