# Ejemplo n.º 1
# 0
def test_exec(config):
    """Create m buckets with n keys for every stored user and download them.

    User credentials are read from the ``user_details`` file (written by the
    user-creation test).  Exits 0 on success, 1 on assertion failure.
    """
    io_info = AddIOInfo()
    io_info.initialize()

    test_info = AddTestInfo('create m buckets, n keys and download')

    try:
        test_info.started_info()

        # credentials were dumped to this file by an earlier test run
        with open('user_details') as user_file:
            users = simplejson.load(user_file)

        for user in users:
            ops = ObjectOps(config, user)
            created = ops.create_bucket()
            ops.upload(created)
            ops.download_keys()

        test_info.success_status('test completed')
        sys.exit(0)

    except AssertionError as e:
        log.error(e)
        test_info.failed_status('test failed: %s' % e)
        sys.exit(1)
def test_exec(config):
    """Enable versioning on a bucket per user and upload keys plus versions.

    Creates ``config.user_count`` users, and for each one creates buckets
    with versioning enabled and uploads ``config.version_count`` versions.
    Exits 0 on success, 1 on assertion failure.
    """
    test_info = AddTestInfo(
        'enable versioning on a bucket and upload keys and its versions')

    add_io_info = AddIOInfo()
    add_io_info.initialize()

    try:

        test_info.started_info()

        all_user_details = rgw_lib.create_users(config.user_count)

        for each_user in all_user_details:

            rgw = ObjectOps(config, each_user)
            rgw.enable_versioning = True
            rgw.version_count = config.version_count
            buckets = rgw.create_bucket()
            rgw.set_bucket_properties()
            rgw.upload(buckets)

        test_info.success_status('test completed')

        sys.exit(0)

    # fixed Python 2 `except AssertionError, e:` syntax (invalid in Python 3)
    except AssertionError as e:
        log.error(e)
        # record the failure status like the sibling tests do
        test_info.failed_status('test failed: %s' % e)
        sys.exit(1)
# Ejemplo n.º 3
# 0
def test_exec(config):
    """Multipart-upload a single object per user into freshly created buckets.

    Exits 0 on success, 1 on assertion failure.
    """
    test_info = AddTestInfo('multipart Upload')

    add_io_info = AddIOInfo()
    add_io_info.initialize()

    try:

        # test case starts

        test_info.started_info()

        all_user_details = rgw_lib.create_users(config.user_count)

        log.info('multipart upload enabled')

        for each_user in all_user_details:

            # one object per bucket is enough to exercise multipart upload
            config.objects_count = 1

            rgw = ObjectOps(config, each_user)
            buckets = rgw.create_bucket()

            rgw.multipart_upload(buckets)

        test_info.success_status('test completed')

        sys.exit(0)

    # fixed Python 2 `except AssertionError, e:` syntax (invalid in Python 3)
    except AssertionError as e:
        log.error(e)
        test_info.failed_status('test failed: %s' % e)
        sys.exit(1)
# Ejemplo n.º 4
# 0
def test_exec(config):
    """Create RGW users and persist their credentials to ``user_details``.

    Exits 0 on success, 1 on assertion failure.
    """
    test_info = AddTestInfo("create users")

    io_info = AddIOInfo()
    io_info.initialize()

    try:
        test_info.started_info()

        users = rgw_lib.create_users(config.user_count,
                                     config.cluster_name)

        # persist the credentials so follow-up tests can reuse these users
        with open("user_details", "w") as handle:
            json.dump(users, handle)

        test_info.success_status("user creation completed")
        sys.exit(0)

    except AssertionError as e:
        log.error(e)
        test_info.failed_status("user creation failed: %s" % e)
        sys.exit(1)
def test_exec_read(config):
    """Grant READ on user1's buckets to every other created user, one at a time.

    For each other user the buckets are first listed without any grant, then
    re-listed after the READ grant is applied.  Exits 1 on assertion failure.
    """
    grants = {'permission': 'READ', 'user_id': None, 'recursive': True}

    test_info = AddTestInfo(
        'Test with read permission on buckets for all users')

    add_io_info = AddIOInfo()
    add_io_info.initialize()

    try:

        # test case starts

        test_info.started_info()

        all_user_details = rgw_lib.create_users(config.user_count)

        user1 = all_user_details[0]
        log.info('user1: %s' % user1)

        # user1 is the bucket owner; everyone else is a reader
        all_user_details.pop(0)

        u1 = ObjectOps(config, user1)

        for each_user in all_user_details:

            u2 = ObjectOps(config, each_user)

            u2_canonical_id = u2.canonical_id

            log.info('canonical id of u2: %s' % u2_canonical_id)

            grants['user_id'] = u2_canonical_id

            u1.grants = None
            u1.create_bucket()
            u1.set_bucket_properties()
            u2.bucket_names = u1.bucket_names
            u2.buckets_created = u1.buckets_created

            u2.grants = None
            u2.set_bucket_properties()

            # set permissions and read

            u1.grants = grants
            u1.set_bucket_properties()
            u2.bucket_names = u1.bucket_names
            u2.buckets_created = u1.buckets_created

            u2.grants = None
            u2.set_bucket_properties()

        test_info.success_status('test completed')

    # fixed Python 2 `except AssertionError, e:` syntax (invalid in Python 3)
    except AssertionError as e:
        log.error(e)
        test_info.failed_status('test failed: %s' % e)
        sys.exit(1)
# Ejemplo n.º 6
# 0
def test_exec(config):
    """Create m buckets for each newly created user and assert success.

    Exits 0 on success, 1 on assertion failure.
    """
    test_info = AddTestInfo('create m buckets')

    add_io_info = AddIOInfo()
    add_io_info.initialize()

    try:

        # test case starts

        test_info.started_info()

        all_user_details = rgw_lib.create_users(config.user_count)

        for each_user in all_user_details:

            rgw = ObjectOps(config, each_user)

            assert rgw.create_bucket()

        test_info.success_status('test completed')

        sys.exit(0)

    # fixed Python 2 `except AssertionError, e:` syntax (invalid in Python 3)
    except AssertionError as e:
        log.error(e)
        test_info.failed_status('test failed: %s' % e)
        sys.exit(1)
# Ejemplo n.º 7
# 0
def test_exec(config):
    """Multipart-upload with a mid-way cancel, then resume and download.

    Exits 0 on success, 1 on assertion failure.
    """
    test_info = AddTestInfo("multipart Upload with cancel and download")
    io_info = AddIOInfo()
    io_info.initialize()
    try:
        test_info.started_info()
        users = rgw_lib.create_users(config.user_count)
        log.info("multipart upload enabled")
        for user in users:
            config.objects_count = 1
            ops = ObjectOps(config, user)
            created = ops.create_bucket()
            # abort the first upload at the configured part number
            ops.break_upload_at_part_no = config.break_at_part_no
            ops.multipart_upload(created)

            log.info("starting at part no: %s" % config.break_at_part_no)
            log.info("--------------------------------------------------")
            # retry with no break point and verify by downloading the keys
            ops.break_upload_at_part_no = 0
            ops.multipart_upload(created)
            ops.download_keys()
        test_info.success_status("test completed")
        sys.exit(0)
    except AssertionError as e:
        log.error(e)
        test_info.failed_status("test failed: %s" % e)
        sys.exit(1)
# Ejemplo n.º 8
# 0
def test_exec(config):
    """Create m buckets and n objects per user, optionally sharded/quota'd.

    Optionally configures bucket index sharding and per-bucket quotas before
    uploading.  User credentials come from the ``user_details`` file.
    Exits 0 on success, 1 on assertion failure.
    """
    test_info = AddTestInfo('create m buckets, n objects and delete')

    try:

        test_info.started_info()
        add_io_info = AddIOInfo()
        read_io_info = ReadIOInfo()
        add_io_info.initialize()

        rgw_service = RGWService()
        quota_mgmt = QuotaMgmt()
        test_config = AddToCephConf()

        if config.shards:
            # apply index sharding and restart RGW so it takes effect
            test_config.set_to_ceph_conf(
                'global', ConfigOpts.rgw_override_bucket_index_max_shards,
                config.shards)

            rgw_service.restart()

            no_of_shards_for_each_bucket = int(config.shards) * int(
                config.bucket_count)

        with open('user_details') as fout:
            all_user_details = simplejson.load(fout)

        for each_user in all_user_details:
            add_io_info.add_user_info(
                **{
                    'user_id': each_user['user_id'],
                    'access_key': each_user['access_key'],
                    'secret_key': each_user['secret_key']
                })

        for each_user in all_user_details:

            if config.max_objects:
                quota_mgmt.set_bucket_quota(each_user['user_id'],
                                            config.max_objects)
                quota_mgmt.enable_bucket_quota(each_user['user_id'])

            rgw = ObjectOps(config, each_user)

            buckets = rgw.create_bucket()
            rgw.upload(buckets)

        #read_io_info.verify_io()

        test_info.success_status('test completed')

        sys.exit(0)

    # fixed Python 2 `except AssertionError, e:` syntax (invalid in Python 3)
    except AssertionError as e:
        log.error(e)
        test_info.failed_status('test failed: %s' % e)
        sys.exit(1)
def test_exec_read(config):
    """Grant READ on user1's buckets to user2 and FULL_CONTROL on user2's
    buckets to user1, then copy user1's objects into user2's bucket and list
    them for verification.  Exits 1 on assertion failure.
    """
    test_info = AddTestInfo('Test with read permission on buckets')
    try:
        io_info = AddIOInfo()
        io_info.initialize()
        test_info.started_info()
        users = rgw_lib.create_users(config.user_count)
        owner, reader = users[0], users[1]
        log.info('user1: %s' % owner)
        log.info('user2: %s' % reader)
        u1 = ObjectOps(config, owner)
        u2 = ObjectOps(config, reader)
        # u1's buckets become readable by u2
        u1.grants = {
            'permission': 'READ',
            'user_id': u2.canonical_id,
            'recursive': True
        }
        u1.create_bucket()
        u1_buckets = u1.set_bucket_properties()
        u1.upload(u1_buckets, object_base_name=u1.canonical_id + '.key')
        source_keys = u1_buckets[0].get_all_keys()
        for key in source_keys:
            log.info('all keys from user 1--------------')
            log.info('name: %s' % key.name)
        # u2's buckets become fully controlled by u1
        u2.grants = {
            'permission': 'FULL_CONTROL',
            'user_id': u1.canonical_id,
            'recursive': True
        }
        u2.create_bucket()
        u2_buckets = u2.set_bucket_properties()
        # u1 holds FULL_CONTROL on u2's bucket, so copy through u1's connection
        target = u1.connection['conn'].get_bucket(u2_buckets[0].name)
        log.info('copying the objects from u1 to u2')
        for each in source_keys:
            target.copy_key(each.key, u1_buckets[0].name, each.key)
        for key in target.get_all_keys():
            log.info('all keys from user 2--------------')
            log.info('name: %s' % key.name)
        log.info('verifying copied objects--------')
        u2.grants = None
        u2_buckets = u2.set_bucket_properties()
        for key in u2_buckets[0].get_all_keys():
            log.info('all keys from user 2--------------')
            log.info('name: %s' % key.name)
        test_info.success_status('test completed')
    except AssertionError as e:
        log.error(e)
        test_info.failed_status('test failed: %s' % e)
        sys.exit(1)
def test_exec_read(config):
    """Cross-account ACL test: READ for user2 on user1's buckets and
    FULL_CONTROL for user1 on user2's, followed by a key copy from user1's
    bucket into user2's and a listing of the copies.
    """
    test_info = AddTestInfo("Test with read permission on buckets")
    try:
        io_info = AddIOInfo()
        io_info.initialize()
        test_info.started_info()
        created_users = rgw_lib.create_users(config.user_count)
        primary = created_users[0]
        log.info("user1: %s" % primary)
        secondary = created_users[1]
        log.info("user2: %s" % secondary)
        u1 = ObjectOps(config, primary)
        u2 = ObjectOps(config, secondary)
        read_grant = {
            "permission": "READ",
            "user_id": u2.canonical_id,
            "recursive": True,
        }
        full_grant = {
            "permission": "FULL_CONTROL",
            "user_id": u1.canonical_id,
            "recursive": True,
        }
        u1.grants = read_grant
        u1.create_bucket()
        u1_buckets = u1.set_bucket_properties()
        u1.upload(u1_buckets, object_base_name=u1.canonical_id + ".key")
        uploaded = u1_buckets[0].get_all_keys()
        for entry in uploaded:
            log.info("all keys from user 1--------------")
            log.info("name: %s" % entry.name)
        u2.grants = full_grant
        u2.create_bucket()
        u2_buckets = u2.set_bucket_properties()
        # copy via u1's connection, which has FULL_CONTROL on u2's bucket
        dest_bucket = u1.connection["conn"].get_bucket(u2_buckets[0].name)
        log.info("copying the objects from u1 to u2")
        for each in uploaded:
            dest_bucket.copy_key(each.key, u1_buckets[0].name, each.key)
        for entry in dest_bucket.get_all_keys():
            log.info("all keys from user 2--------------")
            log.info("name: %s" % entry.name)
        log.info("verifying copied objects--------")
        u2.grants = None
        u2_buckets = u2.set_bucket_properties()
        for entry in u2_buckets[0].get_all_keys():
            log.info("all keys from user 2--------------")
            log.info("name: %s" % entry.name)
        test_info.success_status("test completed")
    except AssertionError as e:
        log.error(e)
        test_info.failed_status("test failed: %s" % e)
        sys.exit(1)
def test_exec(config):
    """Multipart upload with a cancel at a configured part, then resume and
    download, for every user stored in ``user_details``.

    Exits 0 on success, 1 on assertion failure.
    """
    io_info = AddIOInfo()
    io_info.initialize()

    test_info = AddTestInfo("multipart Upload with cancel and download")

    try:
        test_info.started_info()

        with open("user_details") as user_file:
            users = simplejson.load(user_file)

        # register every user's credentials with the IO tracker
        for user in users:
            io_info.add_user_info(
                **{
                    "user_id": user["user_id"],
                    "access_key": user["access_key"],
                    "secret_key": user["secret_key"],
                })

        log.info("multipart upload enabled")

        for user in users:
            config.objects_count = 2

            ops = ObjectOps(config, user)
            created = ops.create_bucket()

            # first attempt aborts at the configured part number
            ops.break_upload_at_part_no = config.break_at_part_no
            ops.multipart_upload(created)

            log.info("starting at part no: %s" % config.break_at_part_no)
            log.info("--------------------------------------------------")

            # second attempt runs to completion; download verifies it
            ops.break_upload_at_part_no = 0
            ops.multipart_upload(created)
            ops.download_keys()

        test_info.success_status("test completed")
        sys.exit(0)

    except AssertionError as e:
        log.error(e)
        test_info.failed_status("test failed: %s" % e)
        sys.exit(1)
def test_exec(config):
    """Create m buckets and n objects per user with optional sharding config.

    Supports static index sharding (``config.shards``) and dynamic resharding
    (``config.dynamic_sharding``); RGW is restarted after reconfiguration.
    Exits 0 on success, 1 on assertion failure.
    """
    test_info = AddTestInfo("create m buckets, n objects and delete")
    add_io_info = AddIOInfo()
    add_io_info.initialize()
    try:
        test_info.started_info()
        rgw_service = RGWService()
        quota_mgmt = QuotaMgmt()
        test_config = AddToCephConf()
        if config.shards:
            test_config.set_to_ceph_conf(
                "global", ConfigOpts.rgw_override_bucket_index_max_shards,
                config.shards)
            # fixed typo in log message: "sleept" -> "sleep"
            log.info(
                "test to continue after service restart, sleep time 120 seconds"
            )
            no_of_shards_for_each_bucket = int(config.shards) * int(
                config.bucket_count)
        if config.dynamic_sharding is True:
            test_config.set_to_ceph_conf(
                "global",
                ConfigOpts.rgw_max_objs_per_shard,
                config.max_objects_per_shard,
            )
            test_config.set_to_ceph_conf("global",
                                         ConfigOpts.rgw_dynamic_resharding,
                                         True)
            num_shards_expected = config.objects_count / config.max_objects_per_shard
            log.info("num_shards_expected: %s" % num_shards_expected)
            log.info(
                "test to continue after service restart, sleep time 120 seconds"
            )
        # restart so the conf changes take effect, then give RGW time to settle
        rgw_service.restart()
        time.sleep(120)
        all_user_details = rgw_lib.create_users(config.user_count)
        for each_user in all_user_details:
            if config.max_objects:
                quota_mgmt.set_bucket_quota(each_user["user_id"],
                                            config.max_objects)
                quota_mgmt.enable_bucket_quota(each_user["user_id"])
            rgw = ObjectOps(config, each_user)
            buckets = rgw.create_bucket()
            rgw.upload(buckets)
        test_info.success_status("test completed")
        sys.exit(0)
    except AssertionError as e:
        log.error(e)
        test_info.failed_status("test failed: %s" % e)
        sys.exit(1)
def test_exec(config):
    """Multipart upload with a cancel/resume cycle and a final download for
    every user listed in ``user_details``.

    Exits 0 on success, 1 on assertion failure.
    """
    io_info = AddIOInfo()
    io_info.initialize()

    test_info = AddTestInfo('multipart Upload with cancel and download')

    try:
        test_info.started_info()

        with open('user_details') as user_file:
            users = simplejson.load(user_file)

        # make every user's credentials known to the IO tracker
        for user in users:
            io_info.add_user_info(**{'user_id': user['user_id'],
                                     'access_key': user['access_key'],
                                     'secret_key': user['secret_key']})

        log.info('multipart upload enabled')

        for user in users:
            config.objects_count = 2

            ops = ObjectOps(config, user)
            created = ops.create_bucket()

            # the first upload deliberately stops at the configured part
            ops.break_upload_at_part_no = config.break_at_part_no
            ops.multipart_upload(created)

            log.info('starting at part no: %s' % config.break_at_part_no)
            log.info('--------------------------------------------------')

            # rerun without a break point, then verify via download
            ops.break_upload_at_part_no = 0
            ops.multipart_upload(created)
            ops.download_keys()

        test_info.success_status('test completed')
        sys.exit(0)

    except AssertionError as e:
        log.error(e)
        test_info.failed_status('test failed: %s' % e)
        sys.exit(1)
def test_exec(config):
    """Create buckets and objects per user, then delete keys and buckets.

    Users come from the ``user_details`` file.  Exits 0 on success, 1 on
    assertion failure.
    """
    io_info = AddIOInfo()
    io_info.initialize()

    test_info = AddTestInfo("create m buckets, n objects and delete")

    try:
        test_info.started_info()

        with open("user_details") as user_file:
            users = simplejson.load(user_file)

        for user in users:
            io_info.add_user_info(
                **{
                    "user_id": user["user_id"],
                    "access_key": user["access_key"],
                    "secret_key": user["secret_key"],
                }
            )

        for user in users:
            ops = ObjectOps(config, user)
            ops.bucket_ops.test_op_code = "delete"

            created = ops.create_bucket()
            ops.upload(created, test_op_code="delete")

            ops.delete_keys()
            # brief pause before removing the (now empty) buckets
            time.sleep(15)
            ops.delete_bucket()

        test_info.success_status("test completed")
        sys.exit(0)

    except AssertionError as e:
        log.error(e)
        test_info.failed_status("test failed: %s" % e)
        sys.exit(1)
def test_exec(config):
    """Enable versioning, upload keys and versions, then suspend versioning.

    Users come from the ``user_details`` file.  Exits 0 on success, 1 on
    assertion failure.
    """
    add_io_info = AddIOInfo()
    add_io_info.initialize()

    test_info = AddTestInfo(
        "enable versioning on a bucket and upload keys and its versions "
        "and suspend version on the same bucket"
    )

    try:

        test_info.started_info()

        with open("user_details") as fout:
            all_user_details = simplejson.load(fout)

        for each_user in all_user_details:
            add_io_info.add_user_info(
                **{
                    "user_id": each_user["user_id"],
                    "access_key": each_user["access_key"],
                    "secret_key": each_user["secret_key"],
                }
            )

        for each_user in all_user_details:

            rgw = ObjectOps(config, each_user)
            rgw.enable_versioning = True
            rgw.version_count = config.version_count
            buckets = rgw.create_bucket()
            rgw.set_bucket_properties()
            rgw.upload(buckets)

            # flip versioning off again on the same bucket
            rgw.enable_versioning = False
            rgw.set_bucket_properties()

        test_info.success_status("test completed")

        sys.exit(0)

    except AssertionError as e:
        log.error(e)
        # record the failure status like the sibling tests do
        test_info.failed_status("test failed: %s" % e)
        sys.exit(1)
def test_exec(config):
    """Create buckets and objects per user, then delete the keys and buckets.

    Users come from the ``user_details`` file.  Exits 0 on success, 1 on
    assertion failure.
    """
    add_io_info = AddIOInfo()
    add_io_info.initialize()

    test_info = AddTestInfo('create m buckets, n objects and delete')

    try:

        # test case starts

        test_info.started_info()

        with open('user_details') as fout:
            all_user_details = simplejson.load(fout)

        for each_user in all_user_details:
            add_io_info.add_user_info(
                **{
                    'user_id': each_user['user_id'],
                    'access_key': each_user['access_key'],
                    'secret_key': each_user['secret_key']
                })

        for each_user in all_user_details:

            rgw = ObjectOps(config, each_user)
            rgw.bucket_ops.test_op_code = 'delete'

            buckets = rgw.create_bucket()
            rgw.upload(buckets, test_op_code='delete')

            rgw.delete_keys()
            # brief pause before removing the now-empty buckets
            time.sleep(15)
            rgw.delete_bucket()

        test_info.success_status('test completed')

        sys.exit(0)

    # fixed Python 2 `except AssertionError, e:` syntax (invalid in Python 3)
    except AssertionError as e:
        log.error(e)
        test_info.failed_status('test failed: %s' % e)
        sys.exit(1)
# Ejemplo n.º 17
# 0
def test_exec(config):
    """Create m buckets for every user stored in ``user_details``.

    Exits 0 on success, 1 on assertion failure.
    """
    io_info = AddIOInfo()
    io_info.initialize()

    reader = ReadIOInfo()

    test_info = AddTestInfo("create m buckets")

    try:
        test_info.started_info()

        with open("user_details") as user_file:
            users = simplejson.load(user_file)

        # write all users details to yaml file.
        for user in users:
            io_info.add_user_info(
                **{
                    "user_id": user["user_id"],
                    "access_key": user["access_key"],
                    "secret_key": user["secret_key"],
                })

        for user in users:
            log.info("User credentials: %s" % user)
            ops = ObjectOps(config, user)
            assert ops.create_bucket()

        # read_io_info.verify_io()

        test_info.success_status("test completed")
        sys.exit(0)

    except AssertionError as e:
        log.error(e)
        test_info.failed_status("test failed: %s" % e)
        sys.exit(1)
# Ejemplo n.º 18
# 0
def test_exec(config):
    """Create m buckets for every user stored in ``user_details``.

    Exits 0 on success, 1 on assertion failure.
    """
    add_io_info = AddIOInfo()
    add_io_info.initialize()

    read_io_info = ReadIOInfo()

    test_info = AddTestInfo('create m buckets')

    try:

        # test case starts

        test_info.started_info()

        with open('user_details') as fout:
            all_user_details = simplejson.load(fout)

        # write all users details to yaml file.

        for each_user in all_user_details:
            add_io_info.add_user_info(
                **{
                    'user_id': each_user['user_id'],
                    'access_key': each_user['access_key'],
                    'secret_key': each_user['secret_key']
                })

        for each_user in all_user_details:
            log.info('User credentials: %s' % each_user)
            rgw = ObjectOps(config, each_user)

            assert rgw.create_bucket()

        #read_io_info.verify_io()

        test_info.success_status('test completed')

        sys.exit(0)

    # fixed Python 2 `except AssertionError, e:` syntax (invalid in Python 3)
    except AssertionError as e:
        log.error(e)
        test_info.failed_status('test failed: %s' % e)
        sys.exit(1)
# Ejemplo n.º 19
# 0
def test_exec(config):
    """Multipart-upload one object per user, for users from ``user_details``.

    Exits 0 on success, 1 on assertion failure.
    """
    add_io_info = AddIOInfo()
    add_io_info.initialize()

    read_io = ReadIOInfo()

    test_info = AddTestInfo('multipart Upload')

    try:

        # test case starts

        test_info.started_info()

        with open('user_details') as fout:
            all_user_details = simplejson.load(fout)

        log.info('multipart upload enabled')

        for each_user in all_user_details:
            add_io_info.add_user_info(
                **{
                    'user_id': each_user['user_id'],
                    'access_key': each_user['access_key'],
                    'secret_key': each_user['secret_key']
                })

        for each_user in all_user_details:

            # a single object suffices to exercise multipart upload
            config.objects_count = 1

            rgw = ObjectOps(config, each_user)
            buckets = rgw.create_bucket()

            rgw.multipart_upload(buckets)

        test_info.success_status('test completed')

        sys.exit(0)

    # fixed Python 2 `except AssertionError, e:` syntax (invalid in Python 3)
    except AssertionError as e:
        log.error(e)
        test_info.failed_status('test failed: %s' % e)
        sys.exit(1)
def test_exec(config):
    """Create buckets with keys for each new user and download them back.

    Exits 0 on success, 1 on assertion failure.
    """
    test_info = AddTestInfo("create m buckets, n keys and download")
    io_info = AddIOInfo()
    io_info.initialize()
    try:
        test_info.started_info()
        users = rgw_lib.create_users(config.user_count)
        for user in users:
            ops = ObjectOps(config, user)
            created = ops.create_bucket()
            ops.upload(created)
            ops.download_keys()
        test_info.success_status("test completed")
        sys.exit(0)
    except AssertionError as e:
        log.error(e)
        test_info.failed_status("test failed: %s" % e)
        sys.exit(1)
# Ejemplo n.º 21
# 0
def test_exec_read(config):
    """Grant READ on user1's buckets to user2 and re-list them as user2.

    Example config::

        config.bucket_count = 3
        config.objects_count = 3
        config.objects_size_range = {'min': 50, 'max': 100}
    """
    grants = {"permission": "READ", "user_id": None, "recursive": True}
    test_info = AddTestInfo("Test with read permission on buckets")
    io_info = AddIOInfo()
    io_info.initialize()
    try:
        test_info.started_info()
        users = rgw_lib.create_users(config.user_count)
        first = users[0]
        log.info("user1: %s" % first)
        second = users[1]
        log.info("user2: %s" % second)
        u1 = ObjectOps(config, first)
        u2 = ObjectOps(config, second)
        reader_id = u2.canonical_id
        log.info("canonical id of u2: %s" % reader_id)
        grants["user_id"] = reader_id
        # create the buckets with no grants first
        u1.grants = None
        u1.create_bucket()
        u1.set_bucket_properties()
        u2.bucket_names = u1.bucket_names
        u2.buckets_created = u1.buckets_created
        u2.grants = None
        u2.set_bucket_properties()
        # now apply the READ grant and access the buckets again as u2
        u1.grants = grants
        u1.set_bucket_properties()
        u2.bucket_names = u1.bucket_names
        u2.buckets_created = u1.buckets_created
        u2.grants = None
        u2.set_bucket_properties()
        test_info.success_status("test completed")
    except AssertionError as e:
        log.error(e)
        test_info.failed_status("test failed: %s" % e)
        sys.exit(1)
def test_exec(config):
    """Enable versioning, upload keys and versions, then delete key versions.

    Users come from the ``user_details`` file.  Exits 0 on success, 1 on
    assertion failure.
    """
    add_io_info = AddIOInfo()
    add_io_info.initialize()

    test_info = AddTestInfo('enable versioning on a bucket and upload '
                            'keys and its versions and delete its versions')

    try:

        test_info.started_info()

        with open('user_details') as fout:
            all_user_details = simplejson.load(fout)

        for each_user in all_user_details:
            add_io_info.add_user_info(
                **{
                    'user_id': each_user['user_id'],
                    'access_key': each_user['access_key'],
                    'secret_key': each_user['secret_key']
                })

        for each_user in all_user_details:

            rgw = ObjectOps(config, each_user)
            rgw.enable_versioning = True
            rgw.version_count = config.version_count
            buckets = rgw.create_bucket()
            rgw.set_bucket_properties()
            rgw.upload(buckets)
            # remove the uploaded versions again
            rgw.delete_key_version()

        test_info.success_status('test completed')

        sys.exit(0)

    # fixed Python 2 `except AssertionError, e:` syntax (invalid in Python 3)
    except AssertionError as e:

        log.error(e)
        test_info.failed_status('test failed: %s' % e)
        sys.exit(1)
def test_exec_write(config):
    """Grant READ, then WRITE, permissions on user1's buckets to every other
    user, verifying uploads fail/succeed accordingly; finally reset the
    buckets to private and verify writes fail again.

    Users come from the ``user_details`` file.  Exits 1 on assertion failure.
    """

    add_io_info = AddIOInfo()
    add_io_info.initialize()

    test_info = AddTestInfo(
        "give the permission for all the users and then reset it")

    try:

        # test case starts

        test_info.started_info()

        # credentials were written to this file by the user-creation test
        with open("user_details") as fout:
            all_user_details = simplejson.load(fout)

        for each_user in all_user_details:

            add_io_info.add_user_info(
                **{
                    "user_id": each_user["user_id"],
                    "access_key": each_user["access_key"],
                    "secret_key": each_user["secret_key"],
                })

        # the first user owns the buckets; everyone else gets grants
        user1 = all_user_details[0]
        u1 = ObjectOps(config, user1)
        log.info("user1: %s" % user1)

        all_user_details.pop(0)

        for each_user in all_user_details:

            print("iter ------------------>")

            log.info("user2: %s" % each_user)

            u2 = ObjectOps(config, each_user)

            u2_canonical_id = u2.canonical_id

            log.info("canonical id of u2: %s" % u2_canonical_id)

            log.info("setting only read permission")

            grants = {"permission": "READ", "user_id": None, "recursive": True}

            log.info("write persmission are not set")
            grants["user_id"] = u2_canonical_id

            u1.grants = grants

            u1.create_bucket()
            u1.set_bucket_properties()
            # point u2 at u1's buckets so it operates on the granted buckets
            u2.bucket_names = u1.bucket_names
            u2.buckets_created = u1.buckets_created

            u2.json_file_upload = u1.json_file_upload
            u2.json_file_download = u1.json_file_download

            u2.grants = None
            u2.create_bucket()
            buckets = u2.set_bucket_properties()
            # READ-only grant: upload by u2 is expected to fail
            key_created = u2.upload(buckets)
            if not key_created:
                log.info(
                    "no write permission set and hence failing to create object"
                )

            elif key_created:
                raise AssertionError("object created even with no permission")

            log.info("setting permission to write also")

            grants = {
                "permission": "WRITE",
                "user_id": u2_canonical_id,
                "recursive": True,
            }
            u1.grants = grants
            u1.set_bucket_properties()
            u2.bucket_names = u1.bucket_names
            u2.buckets_created = u1.buckets_created

            u2.grants = None
            buckets = u2.set_bucket_properties()
            # WRITE granted now: upload by u2 is expected to succeed
            key_created = u2.upload(buckets,
                                    object_base_name=str(u2.canonical_id) +
                                    ".key")
            if key_created:
                log.info("object created after permission set")

        log.info(
            "***************** removing grants and making the bucket private *****************"
        )
        # reset: drop all grants and make the buckets private again
        u1.grants = None
        u1.acls = "private"
        u1.set_bucket_properties()

        for each_user in all_user_details:

            u2 = ObjectOps(config, each_user)

            print("iter ------------------>")

            u2.bucket_names = u1.bucket_names
            u2.buckets_created = u1.buckets_created

            u2.json_file_upload = u1.json_file_upload
            u2.json_file_download = u1.json_file_download

            u2.grants = None
            buckets = u2.set_bucket_properties()

            # after the reset, u2 should not be able to write anymore
            if not buckets:
                log.info("bucket init failed: %s" % buckets)
            elif buckets:

                key_created = u2.upload(buckets)
                if not key_created:
                    log.info(
                        "no write permission set and hence failing to create object"
                    )

                elif key_created:
                    raise AssertionError(
                        "object created even with no permission")

        test_info.success_status("test completed")

    except AssertionError as e:
        log.error(e)
        test_info.failed_status("test failed: %s" % e)
        sys.exit(1)
# Ejemplo n.º 24
# 0
def test(yaml_file_path):
    """Run a 'move' operation through an NFS-Ganesha mount and verify on S3.

    Mounts NFS-Ganesha for the RGW user described in *yaml_file_path*,
    creates base dirs / sub dirs / files through the mount, performs a
    'move' on the NFS side, then checks the file keys are visible on S3.

    Returns a dict ``{'key': bool, 'move': bool}``.
    """

    ganesha_test_config = {
        "mount_point": "ganesha-mount",
        "rgw_user_info": yaml_file_path,
    }

    log.info("ganesha_test_config :%s\n" % ganesha_test_config)

    io_config = {
        "base_dir_count": 2,
        "sub_dir_count": 2,
        "Files": {
            "files_in_dir": 2,
            "size": 10
        },
    }

    add_io_info = AddIOInfo()
    add_io_info.initialize()

    log.info("io_config: %s\n" % io_config)

    log.info("initiating nfs ganesha")

    nfs_ganesha = PrepNFSGanesha(
        mount_point=ganesha_test_config["mount_point"],
        yaml_fname=ganesha_test_config["rgw_user_info"],
    )

    nfs_ganesha.initialize()

    log.info("authenticating rgw user")

    rgw_auth = Authenticate(
        user_id=nfs_ganesha.user_id,
        access_key=nfs_ganesha.access_key,
        secret_key=nfs_ganesha.secret_key,
    )

    auth = rgw_auth.do_auth()

    log.info("begin IO")

    bdir = BaseDir(
        int(io_config["base_dir_count"]),
        rgw_auth.json_file_upload,
        ganesha_test_config["mount_point"],
        auth["conn"],
    )

    bdirs = bdir.create(uname=str(rgw_auth.user_id))

    subdir = SubdirAndObjects(bdirs, io_config, rgw_auth.json_file_upload,
                              auth["conn"])
    subdir.create()

    op_status = subdir.operation_on_nfs(ganesha_test_config["mount_point"],
                                        op_code="move")

    verification = {"key": True, "move": True}

    # First make sure every move operation on the mount succeeded.
    for ops in op_status:
        if not ops["op_code_status"]:
            verification["move"] = False
            break

    # Bug fix: the previous version re-ran the S3 verification once per
    # op-status entry and let a later existing file overwrite an earlier
    # `key = False`. Verify exactly once, and require every file key to
    # exist for the check to pass.
    if verification["move"]:
        log.info("verification starts")

        log.info("key verifcation starts on s3")
        kstatus = subdir.verify_s3()
        log.info("key verificaion complete: \n%s" % kstatus)

        verification["key"] = all(
            ks["exists"] for ks in kstatus if ks["type"] == "file")

    return verification
def test(yaml_file_path):
    """Reset RGW user config, run create IO via NFS-Ganesha, verify on S3.

    Resets the rgw_user_info yaml (so a new RGW user/config is created),
    mounts NFS-Ganesha, creates base dirs / sub dirs / files through the
    mount, then checks that matching buckets and keys appear on S3
    (existence for all keys; md5 and size for files).

    Returns a dict ``{'bucket': bool, 'key': bool}``.
    """

    ganesha_test_config = {
        "mount_point": "ganesha-mount",
        "rgw_user_info": yaml_file_path,
    }

    verification = {"bucket": False, "key": False}

    log.info("ganesha_test_config :%s\n" % ganesha_test_config)

    io_config = {
        "base_dir_count": 2,
        "sub_dir_count": 2,
        "Files": {"files_in_dir": 2, "size": 10},
    }

    add_io_info = AddIOInfo()
    add_io_info.initialize()

    log.info("io_config: %s\n" % io_config)

    log.info("initiating nfs ganesha")

    log.info(
        "resetting rgw_user_info yaml file with null values to that new rgw user will be created and with new_config"
    )

    rgw_user_config_ops = RGWUserConfigOps(
        yaml_fname=ganesha_test_config["rgw_user_info"]
    )
    rgw_user_config_ops.update_config()

    log.info("will take new config and start the basic IO test")
    log.info("--------------------------------------------------")

    nfs_ganesha = PrepNFSGanesha(
        mount_point=ganesha_test_config["mount_point"],
        yaml_fname=ganesha_test_config["rgw_user_info"],
    )

    nfs_ganesha.initialize()

    log.info("authenticating rgw user")

    rgw_auth = Authenticate(
        user_id=nfs_ganesha.user_id,
        access_key=nfs_ganesha.access_key,
        secret_key=nfs_ganesha.secret_key,
    )

    auth = rgw_auth.do_auth()

    log.info("begin IO")

    bdir = BaseDir(
        int(io_config["base_dir_count"]),
        rgw_auth.json_file_upload,
        ganesha_test_config["mount_point"],
        auth["conn"],
    )

    bdirs = bdir.create(uname=str(rgw_auth.user_id))

    subdir = SubdirAndObjects(bdirs, io_config, rgw_auth.json_file_upload, auth["conn"])
    subdir.create()

    log.info("verification starts")

    # Give the RGW/NFS layer time to sync before checking on S3.
    time.sleep(15)

    bstatus = bdir.verify_s3()

    log.info("bucket verification complete")

    kstatus = subdir.verify_s3()

    log.info("key verification complete")

    # Every base dir must exist as a bucket; an empty status list counts
    # as a failure (matches the original init-to-False behavior).
    verification["bucket"] = bool(bstatus) and all(
        bs["exists"] for bs in bstatus)

    # Bug fix: in the previous version a non-file entry's `else: key = True`
    # could overwrite an earlier missing-key failure. A single failure now
    # sticks: every key must exist, and files must match md5 and size.
    key_ok = bool(kstatus)
    for ks in kstatus:
        if not ks["exists"]:
            key_ok = False
            break
        if ks["type"] == "file" and not (
                ks["md5_matched"] and ks["size_matched"]):
            key_ok = False
            break
    verification["key"] = key_ok

    return verification
Ejemplo n.º 26
0
def test_exec_write(config):
    """Grant/revoke bucket ACL test across multiple RGW users.

    The first user from 'user_details' owns the buckets. For every other
    user: with only READ granted, an upload by that user must fail; after
    WRITE is granted, the upload must succeed. Finally all grants are
    removed and the bucket made private, after which uploads by the other
    users must fail again. Exits with status 1 on assertion failure.
    """

    add_io_info = AddIOInfo()
    add_io_info.initialize()

    test_info = AddTestInfo(
        'give the permission for all the users and then reset it')

    try:

        # test case starts

        test_info.started_info()

        # user details are read from a local file, presumably written by an
        # earlier user-creation step -- confirm against the test harness
        with open('user_details') as fout:
            all_user_details = simplejson.load(fout)

        for each_user in all_user_details:

            add_io_info.add_user_info(
                **{
                    'user_id': each_user['user_id'],
                    'access_key': each_user['access_key'],
                    'secret_key': each_user['secret_key']
                })

        # first user is the bucket owner / grantor
        user1 = all_user_details[0]
        u1 = ObjectOps(config, user1)
        log.info('user1: %s' % user1)

        # remaining users act as grantees
        all_user_details.pop(0)

        for each_user in all_user_details:

            print('iter ------------------>')

            log.info('user2: %s' % each_user)

            u2 = ObjectOps(config, each_user)

            u2_canonical_id = u2.canonical_id

            log.info('canonical id of u2: %s' % u2_canonical_id)

            log.info('setting only read permission')

            grants = {'permission': 'READ', 'user_id': None, 'recursive': True}

            log.info('write persmission are not set')
            grants['user_id'] = u2_canonical_id

            u1.grants = grants

            # owner creates the buckets and applies the READ-only grant
            u1.create_bucket()
            u1.set_bucket_properties()
            # point u2 at u1's buckets so u2 operates on the granted buckets
            u2.bucket_names = u1.bucket_names
            u2.buckets_created = u1.buckets_created

            u2.json_file_upload = u1.json_file_upload
            u2.json_file_download = u1.json_file_download

            u2.grants = None
            u2.create_bucket()
            buckets = u2.set_bucket_properties()
            # READ-only: the upload is expected to fail
            key_created = u2.upload(buckets)
            if not key_created:
                log.info(
                    'no write permission set and hence failing to create object'
                )

            elif key_created:
                raise AssertionError("object created even with no permission")

            log.info('setting permission to write also')

            grants = {
                'permission': 'WRITE',
                'user_id': u2_canonical_id,
                'recursive': True
            }
            u1.grants = grants
            u1.set_bucket_properties()
            u2.bucket_names = u1.bucket_names
            u2.buckets_created = u1.buckets_created

            u2.grants = None
            buckets = u2.set_bucket_properties()
            # WRITE granted: the upload is expected to succeed now
            key_created = u2.upload(buckets,
                                    object_base_name=str(u2.canonical_id) +
                                    ".key")
            if key_created:
                log.info('object created after permission set')

        log.info(
            '***************** removing grants and making the bucket private *****************'
        )
        # revoke everything: no grants and a private canned ACL
        u1.grants = None
        u1.acls = 'private'
        u1.set_bucket_properties()

        for each_user in all_user_details:

            u2 = ObjectOps(config, each_user)

            print('iter ------------------>')

            u2.bucket_names = u1.bucket_names
            u2.buckets_created = u1.buckets_created

            u2.json_file_upload = u1.json_file_upload
            u2.json_file_download = u1.json_file_download

            u2.grants = None
            buckets = u2.set_bucket_properties()

            if not buckets:
                log.info('bucket init failed: %s' % buckets)
            elif buckets:

                # bucket is private again: uploads by other users must fail
                key_created = u2.upload(buckets)
                if not key_created:
                    log.info(
                        'no write permission set and hence failing to create object'
                    )

                elif key_created:
                    raise AssertionError(
                        "object created even with no permission")

        test_info.success_status('test completed')

    except AssertionError as e:
        log.error(e)
        test_info.failed_status('test failed: %s' % e)
        sys.exit(1)
def test(yaml_file_path):
    """Edit files on an NFS-Ganesha mount and verify the edits on S3.

    Mounts NFS-Ganesha for the RGW user in *yaml_file_path*, creates a
    small tree of text files through the mount, edits them via NFS, and
    returns the per-key S3 verification status list.
    """
    cfg = {
        'mount_point': 'ganesha-mount',
        'rgw_user_info': yaml_file_path
    }

    log.info('ganesha_test_config :%s\n' % cfg)

    io_config = {
        'base_dir_count': 1,
        'sub_dir_count': 1,
        'Files': {'files_in_dir': 1, 'size': 10}
    }

    io_info = AddIOInfo()
    io_info.initialize()

    log.info('io_config: %s\n' % io_config)

    log.info('initiating nfs ganesha')

    ganesha = PrepNFSGanesha(mount_point=cfg['mount_point'],
                             yaml_fname=cfg['rgw_user_info'])
    ganesha.initialize()

    log.info('authenticating rgw user')

    authenticator = Authenticate(user_id=ganesha.user_id,
                                 access_key=ganesha.access_key,
                                 secret_key=ganesha.secret_key)
    auth = authenticator.do_auth()

    log.info('begin IO')

    base = BaseDir(int(io_config['base_dir_count']),
                   authenticator.json_file_upload,
                   cfg['mount_point'],
                   auth['conn'])
    created_dirs = base.create(uname=str(authenticator.user_id))

    tree = SubdirAndObjects(created_dirs, io_config,
                            authenticator.json_file_upload, auth['conn'])
    tree.create(file_type='text')

    log.info('operation starting: %s' % 'edit')

    statuses = tree.operation_on_nfs(cfg['mount_point'], op_code='edit')

    # abort immediately if any edit on the mount failed
    for entry in statuses:
        if not entry['op_code_status']:
            log.error('operation failed')
            exit(1)

    log.info('verification starts')

    kstatus = tree.verify_s3(op_type='edit')
    log.info('key verificaion complete: \n%s' % kstatus)

    return kstatus
Ejemplo n.º 28
0
def test(yaml_file_path):
    """Run create IO through an NFS-Ganesha mount and verify on S3.

    Mounts NFS-Ganesha for the RGW user in *yaml_file_path*, creates base
    dirs / sub dirs / files through the mount, then checks that matching
    buckets and keys appear on S3 (existence for all keys; md5 and size
    for files).

    Returns a dict: {'bucket': bool, 'key': bool}.
    """

    ganesha_test_config = {
        'mount_point': 'ganesha-mount',
        'rgw_user_info': yaml_file_path
    }

    log.info('ganesha_test_config :%s\n' % ganesha_test_config)

    io_config = {
        'base_dir_count': 2,
        'sub_dir_count': 2,
        'Files': {
            'files_in_dir': 2,
            'size': 10
        }
    }

    add_io_info = AddIOInfo()
    add_io_info.initialize()

    log.info('io_config: %s\n' % io_config)

    log.info('initiating nfs ganesha')

    nfs_ganesha = PrepNFSGanesha(
        mount_point=ganesha_test_config['mount_point'],
        yaml_fname=ganesha_test_config['rgw_user_info'])

    nfs_ganesha.initialize()

    log.info('authenticating rgw user')

    rgw_auth = Authenticate(user_id=nfs_ganesha.user_id,
                            access_key=nfs_ganesha.access_key,
                            secret_key=nfs_ganesha.secret_key)

    auth = rgw_auth.do_auth()

    log.info('begin IO')

    bdir = BaseDir(int(io_config['base_dir_count']), rgw_auth.json_file_upload,
                   ganesha_test_config['mount_point'], auth['conn'])

    bdirs = bdir.create(uname=str(rgw_auth.user_id))

    subdir = SubdirAndObjects(bdirs, io_config, rgw_auth.json_file_upload,
                              auth['conn'])
    subdir.create()

    log.info('verification starts')

    # give the rgw/nfs layer time to sync before checking on s3
    time.sleep(15)

    bstatus = bdir.verify_s3()

    log.info('bucket verification complete')

    kstatus = subdir.verify_s3()

    log.info('key verification complete')

    # Bug fix: verification started as {} so empty status lists returned a
    # dict with missing keys; initialize both results explicitly.
    verification = {'bucket': False, 'key': False}

    # every base dir must exist as a bucket; empty status list is a failure
    verification['bucket'] = bool(bstatus) and all(
        bs['exists'] for bs in bstatus)

    # Bug fix: previously a non-file entry's `else: key = True` could
    # overwrite an earlier missing-key failure. A single failure now sticks.
    key_ok = bool(kstatus)
    for ks in kstatus:
        if not ks['exists']:
            key_ok = False
            break
        if ks['type'] == 'file' and not (
                ks['md5_matched'] and ks['size_matched']):
            key_ok = False
            break
    verification['key'] = key_ok

    return verification
def test(yaml_file_path):
    """Delete keys via S3 and verify they disappear from the NFS mount.

    Uploads objects through RGW, deletes them over S3, then checks on the
    NFS-Ganesha mount that none of the deleted keys are still present.

    Returns a dict: {'delete': bool, 'key': bool}.
    """

    ganesha_test_config = {'mount_point': 'ganesha-mount',
                           'rgw_user_info': yaml_file_path}

    log.info('ganesha_test_config :%s\n' % ganesha_test_config)

    log.info('initiating nfs ganesha')

    add_io_info = AddIOInfo()
    add_io_info.initialize()

    nfs_ganesha = PrepNFSGanesha(mount_point=ganesha_test_config['mount_point'],
                                 yaml_fname=ganesha_test_config['rgw_user_info'])
    nfs_ganesha.initialize()

    config = Config()
    config.bucket_count = 2
    config.objects_count = 2
    config.objects_size_range = {'min': 10, 'max': 50}

    log.info('begin IO')

    rgw_user = nfs_ganesha.read_config()

    rgw = ObjectOps(config, rgw_user)

    buckets = rgw.create_bucket()
    rgw.upload(buckets)

    # allow the uploads to sync to the nfs view
    time.sleep(20)

    # NOTE(review): bdir is never used afterwards; kept in case BaseDir()
    # initialization has side effects -- confirm before removing
    bdir = BaseDir(count=None, json_fname=rgw.json_file_upload,
                   mount_point=ganesha_test_config['mount_point'],
                   auth=rgw.connection['conn'])

    subd = SubdirAndObjects(base_dir_list=None, config=None,
                            json_fname=rgw.json_file_upload,
                            auth=rgw.connection['conn'])

    ks_op_status = subd.operation_on_s3(op_code='delete')

    verification = {'delete': True,
                    'key': True}

    for status in ks_op_status:

        if not status['op_code_status']:
            verification['delete'] = False
            break

    if verification['delete']:

        log.info('verification starts')

        log.info('key verificaion starts')
        kstatus = subd.verify_nfs(mount_point=ganesha_test_config['mount_point'])
        log.info('key verification complete: %s' % kstatus)

        # Bug fix: the old per-key assignment let a later absent key
        # overwrite an earlier still-present key's failure. A key that
        # still exists on the mount now fails the check immediately.
        for ks in kstatus:
            if ks['exists']:
                verification['key'] = False
                break

    return verification
Ejemplo n.º 30
0
def test(yaml_file_path):
    """Move keys via S3 and verify the moved keys appear on the NFS mount.

    Uploads objects through RGW, issues a 'move' over S3, waits for the
    change to propagate, then checks every key is visible on the
    NFS-Ganesha mount.

    Returns a dict: {'delete': bool, 'key': bool} -- 'delete' tracks the
    move op-status, mirroring the sibling delete test's result shape.
    """

    ganesha_test_config = {
        "mount_point": "ganesha-mount",
        "rgw_user_info": yaml_file_path,
    }

    log.info("ganesha_test_config :%s\n" % ganesha_test_config)

    add_io_info = AddIOInfo()
    add_io_info.initialize()

    log.info("initiating nfs ganesha")

    nfs_ganesha = PrepNFSGanesha(
        mount_point=ganesha_test_config["mount_point"],
        yaml_fname=ganesha_test_config["rgw_user_info"],
    )
    nfs_ganesha.initialize()

    config = Config()
    config.bucket_count = 1
    config.objects_count = 2
    config.objects_size_range = {"min": 10, "max": 50}

    log.info("begin IO")

    rgw_user = nfs_ganesha.read_config()

    rgw = ObjectOps(config, rgw_user)

    buckets = rgw.create_bucket()
    rgw.upload(buckets)

    # allow the uploads to sync to the nfs view
    time.sleep(20)

    # NOTE(review): bdir is never used afterwards; kept in case BaseDir()
    # initialization has side effects -- confirm before removing
    bdir = BaseDir(
        count=None,
        json_fname=rgw.json_file_upload,
        mount_point=ganesha_test_config["mount_point"],
        auth=rgw.connection["conn"],
    )

    subd = SubdirAndObjects(
        base_dir_list=None,
        config=None,
        json_fname=rgw.json_file_upload,
        auth=rgw.connection["conn"],
    )

    ks_op_status = subd.operation_on_s3(op_code="move")

    time.sleep(300)  # wait for 5 mins

    # after move, verify on nfs for the changes

    verification = {"delete": True, "key": True}

    for status in ks_op_status:

        if not status["op_code_status"]:
            verification["delete"] = False
            break

    if verification["delete"]:

        log.info("verification starts")

        log.info("key verificaion starts")
        kstatus = subd.verify_nfs(mount_point=ganesha_test_config["mount_point"])
        log.info("key verification complete: %s" % kstatus)

        # Bug fix: the old per-key assignment let a later present key
        # overwrite an earlier missing key's failure; require every moved
        # key to be present on the mount.
        verification["key"] = all(ks["exists"] for ks in kstatus)

    return verification