Example 1
    def __init__(self, connection):

        log.debug("class: %s" % self.__class__.__name__)

        self.connection = connection
        self.add_io_info = AddIOInfo()
        self.test_op_code = "create"
Example 2
def test_exec(config):

    test_info = AddTestInfo("create users")

    add_io_info = AddIOInfo()
    add_io_info.initialize()

    try:
        test_info.started_info()

        all_user_details = rgw_lib.create_users(config.user_count,
                                                config.cluster_name)

        # dump the list of users into a file

        with open("user_details", "w") as fout:
            json.dump(all_user_details, fout)

        test_info.success_status("user creation completed")

        sys.exit(0)

    except AssertionError as e:
        log.error(e)
        test_info.failed_status("user creation failed: %s" % e)
        sys.exit(1)
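
A minimal driver for a test_exec function like the one above, as a sketch only: the Config holder and the argument names are assumptions for illustration, not part of the suite; only user_count and cluster_name come from the example itself.

import argparse


class Config(object):
    """Plain attribute holder passed into test_exec (assumed shape)."""
    pass


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="create users test")
    parser.add_argument("--user-count", type=int, default=2)
    parser.add_argument("--cluster-name", default="ceph")
    args = parser.parse_args()

    config = Config()
    config.user_count = args.user_count
    config.cluster_name = args.cluster_name

    test_exec(config)  # exits 0 on success, 1 on failure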
Example 3
def test_exec(config):

    add_io_info = AddIOInfo()
    add_io_info.initialize()

    test_info = AddTestInfo('create m buckets, n keys and download')

    try:

        # test case starts

        test_info.started_info()

        with open('user_details') as fout:
            all_user_details = simplejson.load(fout)

        for each_user in all_user_details:

            rgw = ObjectOps(config, each_user)
            buckets = rgw.create_bucket()
            rgw.upload(buckets)
            rgw.download_keys()

        test_info.success_status('test completed')

        sys.exit(0)

    except AssertionError as e:
        log.error(e)
        test_info.failed_status('test failed: %s' % e)
        sys.exit(1)
Example 4
def test_exec_read(config):

    grants = {'permission': 'READ', 'user_id': None, 'recursive': True}

    test_info = AddTestInfo(
        'Test with read permission on buckets for all users')

    add_io_info = AddIOInfo()
    add_io_info.initialize()

    try:

        # test case starts

        test_info.started_info()

        all_user_details = rgw_lib.create_users(config.user_count)

        user1 = all_user_details[0]
        log.info('user1: %s' % user1)

        all_user_details.pop(0)

        u1 = ObjectOps(config, user1)

        for each_user in all_user_details:

            u2 = ObjectOps(config, each_user)

            u2_canonical_id = u2.canonical_id

            log.info('canonical id of u2: %s' % u2_canonical_id)

            grants['user_id'] = u2_canonical_id

            u1.grants = None
            u1.create_bucket()
            u1.set_bucket_properties()
            u2.bucket_names = u1.bucket_names
            u2.buckets_created = u1.buckets_created

            u2.grants = None
            u2.set_bucket_properties()

            # set permissions and read

            u1.grants = grants
            u1.set_bucket_properties()
            u2.bucket_names = u1.bucket_names
            u2.buckets_created = u1.buckets_created

            u2.grants = None
            u2.set_bucket_properties()

        test_info.success_status('test completed')

    except AssertionError as e:
        log.error(e)
        test_info.failed_status('test failed: %s' % e)
        sys.exit(1)
Example 5
 def create_admin_user(self, username, displayname, cluster_name="ceph"):
     try:
         add_io_info = AddIOInfo()
         cmd = (
             'radosgw-admin user create --uid="%s" --display-name="%s" --cluster %s'
             % (username, displayname, cluster_name))
         log.info("cmd: %s" % cmd)
         variable = subprocess.Popen(cmd,
                                     stdout=subprocess.PIPE,
                                     shell=True)
         v = variable.stdout.read()
         v_as_json = json.loads(v)
         # log.info(v_as_json)
         user_details = {}
         user_details["user_id"] = v_as_json["user_id"]
         user_details["display_name"] = v_as_json["display_name"]
         user_details["access_key"] = v_as_json["keys"][0]["access_key"]
         user_details["secret_key"] = v_as_json["keys"][0]["secret_key"]
         add_io_info.add_user_info(
             **{
                 "user_id": user_details["user_id"],
                 "access_key": user_details["access_key"],
                 "secret_key": user_details["secret_key"],
             })
         return user_details
     except subprocess.CalledProcessError as e:
         error = e.output + str(e.returncode)
         log.error(error)
         return False
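
One caveat in the snippet above: subprocess.Popen with shell=True never raises CalledProcessError, so that except branch can never fire. A sketch of an equivalent call using subprocess.check_output, which does raise on a non-zero exit status; the helper name here is illustrative, not from the suite:

import json
import logging
import shlex
import subprocess

log = logging.getLogger(__name__)


def run_radosgw_admin_user_create(username, displayname, cluster_name="ceph"):
    cmd = 'radosgw-admin user create --uid="%s" --display-name="%s" --cluster %s' % (
        username, displayname, cluster_name)
    try:
        # check_output raises CalledProcessError on non-zero exit status
        out = subprocess.check_output(shlex.split(cmd))
        return json.loads(out)
    except subprocess.CalledProcessError as e:
        log.error("%s exited %s" % (cmd, e.returncode))
        return None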
Example 6
def test_exec(config):
    test_info = AddTestInfo("multipart Upload with cancel and download")
    add_io_info = AddIOInfo()
    add_io_info.initialize()
    try:
        # test case starts
        test_info.started_info()
        all_user_details = rgw_lib.create_users(config.user_count)
        log.info("multipart upload enabled")
        for each_user in all_user_details:
            config.objects_count = 1
            rgw = ObjectOps(config, each_user)
            buckets = rgw.create_bucket()
            rgw.break_upload_at_part_no = config.break_at_part_no
            rgw.multipart_upload(buckets)

            log.info("starting at part no: %s" % config.break_at_part_no)
            log.info("--------------------------------------------------")
            rgw.break_upload_at_part_no = 0
            rgw.multipart_upload(buckets)
            rgw.download_keys()
        test_info.success_status("test completed")
        sys.exit(0)
    except AssertionError as e:
        log.error(e)
        test_info.failed_status("test failed: %s" % e)
        sys.exit(1)
Example 7
def test_exec(config):

    test_info = AddTestInfo('create m buckets')

    add_io_info = AddIOInfo()
    add_io_info.initialize()

    try:

        # test case starts

        test_info.started_info()

        all_user_details = rgw_lib.create_users(config.user_count)

        for each_user in all_user_details:

            rgw = ObjectOps(config, each_user)

            assert rgw.create_bucket()

        test_info.success_status('test completed')

        sys.exit(0)

    except AssertionError as e:
        log.error(e)
        test_info.failed_status('test failed: %s' % e)
        sys.exit(1)
Example 8
def test_exec(config):

    test_info = AddTestInfo(
        'enable versioning on a bucket and upload keys and its versions')

    add_io_info = AddIOInfo()
    add_io_info.initialize()

    try:

        test_info.started_info()

        all_user_details = rgw_lib.create_users(config.user_count)

        for each_user in all_user_details:

            rgw = ObjectOps(config, each_user)
            rgw.enable_versioning = True
            rgw.version_count = config.version_count
            buckets = rgw.create_bucket()
            rgw.set_bucket_properties()
            rgw.upload(buckets)

        test_info.success_status('test completed')

        sys.exit(0)

    except AssertionError as e:
        log.error(e)
        test_info.failed_status('test failed: %s' % e)
        sys.exit(1)
Example 9
 def create_admin_user(self, username, displayname, cluster_name='ceph'):
     try:
         add_io_info = AddIOInfo()
         cmd = 'radosgw-admin user create --uid=%s --display-name=%s --cluster %s' % (
             username, displayname, cluster_name)
         log.info('cmd: %s' % cmd)
         variable = subprocess.Popen(cmd,
                                     stdout=subprocess.PIPE,
                                     shell=True)
         v = variable.stdout.read()
         v_as_json = json.loads(v)
         # log.info(v_as_json)
         user_details = {}
         user_details['user_id'] = v_as_json['user_id']
         user_details['display_name'] = v_as_json['display_name']
         user_details['access_key'] = v_as_json['keys'][0]['access_key']
         user_details['secret_key'] = v_as_json['keys'][0]['secret_key']
         add_io_info.add_user_info(
             **{
                 'user_id': user_details['user_id'],
                 'access_key': user_details['access_key'],
                 'secret_key': user_details['secret_key']
             })
         return user_details
     except subprocess.CalledProcessError as e:
         error = e.output + str(e.returncode)
         log.error(error)
         return False
Example 10
def test_exec(config):

    test_info = AddTestInfo('multipart Upload')

    add_io_info = AddIOInfo()
    add_io_info.initialize()

    try:

        # test case starts

        test_info.started_info()

        all_user_details = rgw_lib.create_users(config.user_count)

        log.info('multipart upload enabled')

        for each_user in all_user_details:

            config.objects_count = 1

            rgw = ObjectOps(config, each_user)
            buckets = rgw.create_bucket()

            rgw.multipart_upload(buckets)

        test_info.success_status('test completed')

        sys.exit(0)

    except AssertionError as e:
        log.error(e)
        test_info.failed_status('test failed: %s' % e)
        sys.exit(1)
Example 11
def test_exec_read(config):
    test_info = AddTestInfo("Test with read permission on buckets")
    try:
        # test case starts
        add_io_info = AddIOInfo()
        add_io_info.initialize()
        test_info.started_info()
        all_user_details = rgw_lib.create_users(config.user_count)
        user1 = all_user_details[0]
        log.info("user1: %s" % user1)
        user2 = all_user_details[1]
        log.info("user2: %s" % user2)
        u1 = ObjectOps(config, user1)
        u2 = ObjectOps(config, user2)
        u1_grants = {
            "permission": "READ",
            "user_id": u2.canonical_id,
            "recursive": True,
        }
        u2_grants = {
            "permission": "FULL_CONTROL",
            "user_id": u1.canonical_id,
            "recursive": True,
        }
        u1.grants = u1_grants
        u1.create_bucket()
        u1_buckets = u1.set_bucket_properties()
        u1.upload(u1_buckets, object_base_name=u1.canonical_id + ".key")
        all_keys = u1_buckets[0].get_all_keys()
        for key in all_keys:
            log.info("all keys from user 1--------------")
            log.info("name: %s" % key.name)
        u2.grants = u2_grants
        u2.create_bucket()
        u2_buckets = u2.set_bucket_properties()
        bu2 = u1.connection["conn"].get_bucket(u2_buckets[0].name)
        log.info("copying the objects from u1 to u2")
        for each in all_keys:
            bu2.copy_key(each.key, u1_buckets[0].name, each.key)
        all_keys2 = bu2.get_all_keys()
        for key in all_keys2:
            log.info("all keys from user 2--------------")
            log.info("name: %s" % key.name)
        log.info("verifying copied objects--------")
        u2.grants = None
        u2_buckets = u2.set_bucket_properties()
        all_keys3 = u2_buckets[0].get_all_keys()
        for key in all_keys3:
            log.info("all keys from user 2--------------")
            log.info("name: %s" % key.name)
        test_info.success_status("test completed")
    except AssertionError as e:
        log.error(e)
        test_info.failed_status("test failed: %s" % e)
        sys.exit(1)
Example 12
def test_exec_read(config):
    test_info = AddTestInfo('Test with read permission on buckets')
    try:
        # test case starts
        add_io_info = AddIOInfo()
        add_io_info.initialize()
        test_info.started_info()
        all_user_details = rgw_lib.create_users(config.user_count)
        user1 = all_user_details[0]
        log.info('user1: %s' % user1)
        user2 = all_user_details[1]
        log.info('user2: %s' % user2)
        u1 = ObjectOps(config, user1)
        u2 = ObjectOps(config, user2)
        u1_grants = {
            'permission': 'READ',
            'user_id': u2.canonical_id,
            'recursive': True
        }
        u2_grants = {
            'permission': 'FULL_CONTROL',
            'user_id': u1.canonical_id,
            'recursive': True
        }
        u1.grants = u1_grants
        u1.create_bucket()
        u1_buckets = u1.set_bucket_properties()
        u1.upload(u1_buckets, object_base_name=u1.canonical_id + '.key')
        all_keys = u1_buckets[0].get_all_keys()
        for key in all_keys:
            log.info('all keys from user 1--------------')
            log.info('name: %s' % key.name)
        u2.grants = u2_grants
        u2.create_bucket()
        u2_buckets = u2.set_bucket_properties()
        bu2 = u1.connection['conn'].get_bucket(u2_buckets[0].name)
        log.info('copying the objects from u1 to u2')
        for each in all_keys:
            bu2.copy_key(each.key, u1_buckets[0].name, each.key)
        all_keys2 = bu2.get_all_keys()
        for key in all_keys2:
            log.info('all keys from user 2--------------')
            log.info('name: %s' % key.name)
        log.info('verifying copied objects--------')
        u2.grants = None
        u2_buckets = u2.set_bucket_properties()
        all_keys3 = u2_buckets[0].get_all_keys()
        for key in all_keys3:
            log.info('all keys from user 2--------------')
            log.info('name: %s' % key.name)
        test_info.success_status('test completed')
    except AssertionError as e:
        log.error(e)
        test_info.failed_status('test failed: %s' % e)
        sys.exit(1)
Example 13
 def __init__(self, bucket, filename):
     log.debug('class: %s' % self.__class__.__name__)
     self.bucket = bucket
     self.split_files_list = []
     self.filename = filename
     self.json_ops = None
     self.cancel_multpart = False
     self.mp = None
     self.md5 = None
     self.break_at_part_no = 0
     self.add_io_info = AddIOInfo()
Example 14
def test_exec(config):
    test_info = AddTestInfo("create m buckets, n objects and delete")
    add_io_info = AddIOInfo()
    add_io_info.initialize()
    try:
        test_info.started_info()
        rgw_service = RGWService()
        quota_mgmt = QuotaMgmt()
        test_config = AddToCephConf()
        if config.shards:
            test_config.set_to_ceph_conf(
                "global", ConfigOpts.rgw_override_bucket_index_max_shards,
                config.shards)
            log.info(
                "test to continue after service restart, sleep time 120 seconds"
            )
            no_of_shards_for_each_bucket = int(config.shards) * int(
                config.bucket_count)
        if config.dynamic_sharding is True:
            test_config.set_to_ceph_conf(
                "global",
                ConfigOpts.rgw_max_objs_per_shard,
                config.max_objects_per_shard,
            )
            test_config.set_to_ceph_conf("global",
                                         ConfigOpts.rgw_dynamic_resharding,
                                         True)
            num_shards_expected = config.objects_count / config.max_objects_per_shard
            log.info("num_shards_expected: %s" % num_shards_expected)
            log.info(
                "test to continue after service restart, sleep time 120 seconds"
            )
        rgw_service.restart()
        time.sleep(120)
        all_user_details = rgw_lib.create_users(config.user_count)
        for each_user in all_user_details:
            if config.max_objects:
                quota_mgmt.set_bucket_quota(each_user["user_id"],
                                            config.max_objects)
                quota_mgmt.enable_bucket_quota(each_user["user_id"])
            rgw = ObjectOps(config, each_user)
            buckets = rgw.create_bucket()
            rgw.upload(buckets)
        test_info.success_status("test completed")
        sys.exit(0)
    except AssertionError as e:
        log.error(e)
        test_info.failed_status("test failed: %s" % e)
        sys.exit(1)
Example 15
class Bucket(object):
    def __init__(self, connection):

        log.debug('class: %s' % self.__class__.__name__)

        self.connection = connection
        self.add_io_info = AddIOInfo()
        self.test_op_code = 'create'

    def create(self, bucket_name, json_file):
        """
        :param bucket_name: string
        :rtype: dict
        :return: create_bucket_stack:
                        args:
                            1. status: True if the bucket was created, False if creation failed
                            2. bucket: bucket object
                            3. msgs: error messages
        """

        log.debug('function: %s' % self.create.__name__)

        log.info('in create bucket')

        try:

            bucket = self.connection.create_bucket(bucket_name)

            create_bucket_stack = {'status': True,
                                   'bucket': bucket}

            add_bucket_to_json = JBucket(json_file)

            add_bucket_to_json.add(bucket_name)

            self.add_io_info.add_bucket_info(self.connection.access_key, **{'bucket_name': bucket_name,
                                                                            'test_op_code': self.test_op_code})


        except (exception.AWSConnectionError, exception.BotoClientError,
                exception.S3ResponseError, exception.S3CreateError,
                IOError) as e:
            log.error(e)
            create_bucket_stack = {'status': False,
                                   'msgs': e}

        return create_bucket_stack
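
A usage sketch for the Bucket helper above, assuming a boto (v2) connection to an RGW endpoint; the host, credentials, bucket name, and json file name are placeholders, and JBucket/AddIOInfo are expected to come from the suite's own utils:

import boto
import boto.s3.connection

conn = boto.connect_s3(
    aws_access_key_id='ACCESS_KEY',        # placeholder
    aws_secret_access_key='SECRET_KEY',    # placeholder
    host='rgw.example.com',                # placeholder RGW endpoint
    is_secure=False,
    calling_format=boto.s3.connection.OrdinaryCallingFormat())

bucket_ops = Bucket(conn)
result = bucket_ops.create('my-test-bucket', 'bucket_details.json')
if result['status']:
    print('created bucket: %s' % result['bucket'].name)
else:
    print('bucket creation failed: %s' % result['msgs'])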
Example 16
    def __init__(self, key, json_file):

        log.debug('class: %s' % self.__class__.__name__)

        self.json_file = json_file
        self.jkey = JKeys(self.json_file)
        self.key = key
        self.add_io_info = AddIOInfo()
Example 17
def test_exec(config):
    test_info = AddTestInfo("create m buckets, n keys and download")
    add_io_info = AddIOInfo()
    add_io_info.initialize()
    try:
        # test case starts
        test_info.started_info()
        all_user_details = rgw_lib.create_users(config.user_count)
        for each_user in all_user_details:
            rgw = ObjectOps(config, each_user)
            buckets = rgw.create_bucket()
            rgw.upload(buckets)
            rgw.download_keys()
        test_info.success_status("test completed")
        sys.exit(0)
    except AssertionError as e:
        log.error(e)
        test_info.failed_status("test failed: %s" % e)
        sys.exit(1)
Example 18
def test_exec_read(config):
    """
    config.bucket_count = 3
    config.objects_count = 3
    config.objects_size_range = {'min': 50, 'max': 100}
    """
    grants = {"permission": "READ", "user_id": None, "recursive": True}
    test_info = AddTestInfo("Test with read permission on buckets")
    add_io_info = AddIOInfo()
    add_io_info.initialize()
    try:
        # test case starts
        test_info.started_info()
        all_user_details = rgw_lib.create_users(config.user_count)
        user1 = all_user_details[0]
        log.info("user1: %s" % user1)
        user2 = all_user_details[1]
        log.info("user2: %s" % user2)
        u1 = ObjectOps(config, user1)
        u2 = ObjectOps(config, user2)
        u2_canonical_id = u2.canonical_id
        log.info("canonical id of u2: %s" % u2_canonical_id)
        grants["user_id"] = u2_canonical_id
        u1.grants = None
        u1.create_bucket()
        u1.set_bucket_properties()
        u2.bucket_names = u1.bucket_names
        u2.buckets_created = u1.buckets_created
        u2.grants = None
        u2.set_bucket_properties()
        # set permissions and read
        u1.grants = grants
        u1.set_bucket_properties()
        u2.bucket_names = u1.bucket_names
        u2.buckets_created = u1.buckets_created
        u2.grants = None
        u2.set_bucket_properties()
        test_info.success_status("test completed")
    except AssertionError as e:
        log.error(e)
        test_info.failed_status("test failed: %s" % e)
        sys.exit(1)
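
Per the docstring at the top of this example, the test expects bucket_count, objects_count, and objects_size_range on the config object. A minimal construction, assuming the same plain attribute-holder Config used in the driver sketch at the end of Example 2:

config = Config()
config.user_count = 2
config.bucket_count = 3
config.objects_count = 3
config.objects_size_range = {"min": 50, "max": 100}

test_exec_read(config)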
Example 19
def test_exec(config):

    test_info = AddTestInfo('create m buckets, n objects and delete')

    try:

        test_info.started_info()
        add_io_info = AddIOInfo()
        read_io_info = ReadIOInfo()
        add_io_info.initialize()

        rgw_service = RGWService()
        quota_mgmt = QuotaMgmt()
        test_config = AddToCephConf()

        if config.shards:
            test_config.set_to_ceph_conf(
                'global', ConfigOpts.rgw_override_bucket_index_max_shards,
                config.shards)

            rgw_service.restart()

            no_of_shards_for_each_bucket = int(config.shards) * int(
                config.bucket_count)

        with open('user_details') as fout:
            all_user_details = simplejson.load(fout)

        for each_user in all_user_details:
            add_io_info.add_user_info(
                **{
                    'user_id': each_user['user_id'],
                    'access_key': each_user['access_key'],
                    'secret_key': each_user['secret_key']
                })

        for each_user in all_user_details:

            if config.max_objects:
                quota_mgmt.set_bucket_quota(each_user['user_id'],
                                            config.max_objects)
                quota_mgmt.enable_bucket_quota(each_user['user_id'])

            rgw = ObjectOps(config, each_user)

            buckets = rgw.create_bucket()
            rgw.upload(buckets)

        #read_io_info.verify_io()

        test_info.success_status('test completed')

        sys.exit(0)

    except AssertionError as e:
        log.error(e)
        test_info.failed_status('test failed: %s' % e)
        sys.exit(1)
Example 20
def test_exec(config):

    add_io_info = AddIOInfo()
    add_io_info.initialize()

    test_info = AddTestInfo("multipart Upload with cancel and download")

    try:

        # test case starts

        test_info.started_info()

        with open("user_details") as fout:
            all_user_details = simplejson.load(fout)

        for each_user in all_user_details:
            add_io_info.add_user_info(
                **{
                    "user_id": each_user["user_id"],
                    "access_key": each_user["access_key"],
                    "secret_key": each_user["secret_key"],
                })

        log.info("multipart upload enabled")

        for each_user in all_user_details:
            config.objects_count = 2

            rgw = ObjectOps(config, each_user)
            buckets = rgw.create_bucket()

            rgw.break_upload_at_part_no = config.break_at_part_no
            rgw.multipart_upload(buckets)

            log.info("starting at part no: %s" % config.break_at_part_no)
            log.info("--------------------------------------------------")

            rgw.break_upload_at_part_no = 0
            rgw.multipart_upload(buckets)
            rgw.download_keys()

        test_info.success_status("test completed")

        sys.exit(0)

    except AssertionError as e:
        log.error(e)
        test_info.failed_status("test failed: %s" % e)
        sys.exit(1)
Example 21
def test_exec(config):

    add_io_info = AddIOInfo()
    add_io_info.initialize()

    test_info = AddTestInfo('multipart Upload with cancel and download')

    try:

        # test case starts

        test_info.started_info()

        with open('user_details') as fout:
            all_user_details = simplejson.load(fout)


        for each_user in all_user_details:
            add_io_info.add_user_info(**{'user_id': each_user['user_id'],
                                         'access_key': each_user['access_key'],
                                         'secret_key': each_user['secret_key']})


        log.info('multipart upload enabled')

        for each_user in all_user_details:
            config.objects_count = 2

            rgw = ObjectOps(config, each_user)
            buckets = rgw.create_bucket()

            rgw.break_upload_at_part_no = config.break_at_part_no
            rgw.multipart_upload(buckets)

            log.info('starting at part no: %s' % config.break_at_part_no)
            log.info('--------------------------------------------------')

            rgw.break_upload_at_part_no = 0
            rgw.multipart_upload(buckets)
            rgw.download_keys()

        test_info.success_status('test completed')

        sys.exit(0)

    except AssertionError as e:
        log.error(e)
        test_info.failed_status('test failed: %s' % e)
        sys.exit(1)
Example 22
def test_exec(config):

    add_io_info = AddIOInfo()
    add_io_info.initialize()

    test_info = AddTestInfo(
        "enable versioning on a bucket and upload keys and its versions "
        "and suspend version on the same bucket"
    )

    try:

        test_info.started_info()

        with open("user_details") as fout:
            all_user_details = simplejson.load(fout)

        for each_user in all_user_details:
            add_io_info.add_user_info(
                **{
                    "user_id": each_user["user_id"],
                    "access_key": each_user["access_key"],
                    "secret_key": each_user["secret_key"],
                }
            )

        for each_user in all_user_details:

            rgw = ObjectOps(config, each_user)
            rgw.enable_versioning = True
            rgw.version_count = config.version_count
            buckets = rgw.create_bucket()
            rgw.set_bucket_properties()
            rgw.upload(buckets)

            rgw.enable_versioning = False
            rgw.set_bucket_properties()

        test_info.success_status("test completed")

        sys.exit(0)

    except AssertionError as e:
        log.error(e)
        test_info.failed_status("test failed: %s" % e)
        sys.exit(1)
Example 23
def test_exec(config):

    add_io_info = AddIOInfo()
    add_io_info.initialize()

    test_info = AddTestInfo("create m buckets, n objects and delete")

    try:

        # test case starts

        test_info.started_info()

        with open("user_details") as fout:
            all_user_details = simplejson.load(fout)

        for each_user in all_user_details:
            add_io_info.add_user_info(
                **{
                    "user_id": each_user["user_id"],
                    "access_key": each_user["access_key"],
                    "secret_key": each_user["secret_key"],
                }
            )

        for each_user in all_user_details:

            rgw = ObjectOps(config, each_user)
            rgw.bucket_ops.test_op_code = "delete"

            buckets = rgw.create_bucket()
            rgw.upload(buckets, test_op_code="delete")

            rgw.delete_keys()
            time.sleep(15)
            rgw.delete_bucket()

        test_info.success_status("test completed")

        sys.exit(0)

    except AssertionError as e:
        log.error(e)
        test_info.failed_status("test failed: %s" % e)
        sys.exit(1)
Example 24
def test_exec(config):

    add_io_info = AddIOInfo()
    add_io_info.initialize()

    test_info = AddTestInfo('create m buckets, n objects and delete')

    try:

        # test case starts

        test_info.started_info()

        with open('user_details') as fout:
            all_user_details = simplejson.load(fout)

        for each_user in all_user_details:
            add_io_info.add_user_info(
                **{
                    'user_id': each_user['user_id'],
                    'access_key': each_user['access_key'],
                    'secret_key': each_user['secret_key']
                })

        for each_user in all_user_details:

            rgw = ObjectOps(config, each_user)
            rgw.bucket_ops.test_op_code = 'delete'

            buckets = rgw.create_bucket()
            rgw.upload(buckets, test_op_code='delete')

            rgw.delete_keys()
            time.sleep(15)
            rgw.delete_bucket()

        test_info.success_status('test completed')

        sys.exit(0)

    except AssertionError as e:
        log.error(e)
        test_info.failed_status('test failed: %s' % e)
        sys.exit(1)
Example 25
def test_exec(config):

    add_io_info = AddIOInfo()
    add_io_info.initialize()

    read_io_info = ReadIOInfo()

    test_info = AddTestInfo("create m buckets")

    try:

        # test case starts

        test_info.started_info()

        with open("user_details") as fout:
            all_user_details = simplejson.load(fout)

        # write all user details to a yaml file.

        for each_user in all_user_details:
            add_io_info.add_user_info(
                **{
                    "user_id": each_user["user_id"],
                    "access_key": each_user["access_key"],
                    "secret_key": each_user["secret_key"],
                })

        for each_user in all_user_details:
            log.info("User credentials: %s" % each_user)
            rgw = ObjectOps(config, each_user)

            assert rgw.create_bucket()

        # read_io_info.verify_io()

        test_info.success_status("test completed")

        sys.exit(0)

    except AssertionError as e:
        log.error(e)
        test_info.failed_status("test failed: %s" % e)
        sys.exit(1)
Example 26
def test_exec(config):

    add_io_info = AddIOInfo()
    add_io_info.initialize()

    read_io_info = ReadIOInfo()

    test_info = AddTestInfo('create m buckets')

    try:

        # test case starts

        test_info.started_info()

        with open('user_details') as fout:
            all_user_details = simplejson.load(fout)

        # write all user details to a yaml file.

        for each_user in all_user_details:
            add_io_info.add_user_info(
                **{
                    'user_id': each_user['user_id'],
                    'access_key': each_user['access_key'],
                    'secret_key': each_user['secret_key']
                })

        for each_user in all_user_details:
            log.info('User credentials: %s' % each_user)
            rgw = ObjectOps(config, each_user)

            assert rgw.create_bucket()

        #read_io_info.verify_io()

        test_info.success_status('test completed')

        sys.exit(0)

    except AssertionError as e:
        log.error(e)
        test_info.failed_status('test failed: %s' % e)
        sys.exit(1)
Example 27
def test_exec(config):
    add_io_info = AddIOInfo()
    add_io_info.initialize()

    read_io = ReadIOInfo()

    test_info = AddTestInfo('multipart Upload')

    try:

        # test case starts

        test_info.started_info()

        with open('user_details') as fout:
            all_user_details = simplejson.load(fout)

        log.info('multipart upload enabled')

        for each_user in all_user_details:
            add_io_info.add_user_info(
                **{
                    'user_id': each_user['user_id'],
                    'access_key': each_user['access_key'],
                    'secret_key': each_user['secret_key']
                })

        for each_user in all_user_details:

            config.objects_count = 1

            rgw = ObjectOps(config, each_user)
            buckets = rgw.create_bucket()

            rgw.multipart_upload(buckets)

        test_info.success_status('test completed')

        sys.exit(0)

    except AssertionError as e:
        log.error(e)
        test_info.failed_status('test failed: %s' % e)
        sys.exit(1)
Example 28
def test_exec(config):

    add_io_info = AddIOInfo()
    add_io_info.initialize()

    test_info = AddTestInfo('enable versioning on a bucket and upload '
                            'keys and its versions and delete its versions')

    try:

        test_info.started_info()

        with open('user_details') as fout:
            all_user_details = simplejson.load(fout)

        for each_user in all_user_details:
            add_io_info.add_user_info(
                **{
                    'user_id': each_user['user_id'],
                    'access_key': each_user['access_key'],
                    'secret_key': each_user['secret_key']
                })

        for each_user in all_user_details:

            rgw = ObjectOps(config, each_user)
            rgw.enable_versioning = True
            rgw.version_count = config.version_count
            buckets = rgw.create_bucket()
            rgw.set_bucket_properties()
            rgw.upload(buckets)
            rgw.delete_key_version()

        test_info.success_status('test completed')

        sys.exit(0)

    except AssertionError as e:

        log.error(e)
        test_info.failed_status('test failed: %s' % e)
        sys.exit(1)
Example 29
class MultipartPut(object):
    def __init__(self, bucket, filename):
        log.debug('class: %s' % self.__class__.__name__)
        self.bucket = bucket
        self.split_files_list = []
        self.filename = filename
        self.json_ops = None
        self.cancel_multpart = False
        self.mp = None
        self.md5 = None
        self.break_at_part_no = 0
        self.add_io_info = AddIOInfo()

    def iniate_multipart(self, json_file):
        try:
            self.json_ops = JMulpipart(json_file)
            log.info('initiating multipart upload')
            file_path = os.path.dirname(self.filename)
            key_name = os.path.basename(self.filename)
            if not os.path.exists(json_file):
                log.info('fresh multipart upload')
                log.info('got filename: %s\ngot filepath: %s' %
                         (self.filename, file_path))
                utils.split_file(self.filename)
                self.split_files_list = sorted(
                    glob.glob(file_path + '/' + 'x*'))
                # log.info('split files list: %s' % self.split_files_list)
                self.json_ops.total_parts_count = len(self.split_files_list)
                self.json_ops.bucket_name = self.bucket.name
                log.info('total file parts %s' %
                         self.json_ops.total_parts_count)
                remaining_file_parts = []
                for each_file in self.split_files_list:
                    remaining_file_parts.append(
                        (each_file,
                         (self.split_files_list.index(each_file) + 1)))
                # log.info('remaining file parts structure: %s' % remaining_file_parts)
                self.json_ops.remaining_file_parts = remaining_file_parts
                self.mp = self.bucket.initiate_multipart_upload(key_name)
                self.json_ops.mp_id = self.mp.id
                self.json_ops.key_name = self.mp.key_name
                log.info('multipart_id :%s' % self.mp.id)
                log.info('key_name %s' % self.mp.key_name)
                self.json_ops.create_update_json_file()
            else:
                log.info('not fresh multipart')
                self.json_ops.refresh_json_data()
                self.mp = boto.s3.multipart.MultiPartUpload(self.bucket)
                self.mp.key_name = self.json_ops.key_name
                self.mp.id = self.json_ops.mp_id
                log.info('multipart_id :%s' % self.mp.id)
                log.info('key_name %s' % self.mp.key_name)
        except (exception.BotoClientError, exception.S3ResponseError) as e:
            log.error(e)
            return False

    def put(self):
        try:
            log.info('loading the json data')
            self.json_ops.refresh_json_data()
            log.debug('remaining parts assigning')
            log.debug('making a copy of list of remaining parts')
            remaining_file_parts_copy = list(
                self.json_ops.remaining_file_parts)
            log.debug('starting the loop')
            for each_file_part in self.json_ops.remaining_file_parts:
                log.info('file part to upload: %s\nfile part number: %s' %
                         (each_file_part[0], int(each_file_part[1])))
                log.info('entering iteration')
                if self.break_at_part_no != 0 and self.break_at_part_no == int(
                        each_file_part[1]):
                    log.info('upload stopped at partno : %s' %
                             each_file_part[1])
                    break
                fp = open(each_file_part[0], 'rb')
                self.mp.upload_part_from_file(fp, int(each_file_part[1]))
                fp.close()
                log.info('part of file uploaded')
                remaining_file_parts_copy.remove(each_file_part)
                self.json_ops.remaining_file_parts = remaining_file_parts_copy
                log.info('updating json file')
                self.json_ops.create_update_json_file()
            log.info('printing all the uploaded parts')
            for part in self.mp:
                log.info('%s: %s' % (part.part_number, part.size))
            if self.break_at_part_no == 0:
                # if self.cancel_multpart:
                #     log.info('cancelling upload')
                #
                #     self.mp.cancel_upload()
                #
                #     if not self.mp:
                #         upload_status = {'status': False}
                #
                log.info('completing upload')
                self.mp.complete_upload()
                mp_object = self.bucket.get_key(self.mp.key_name)
                log.info('adding io info to yaml')
                self.add_io_info.add_keys_info(
                    self.bucket.connection.access_key, self.bucket.name, **{
                        'key_name': os.path.basename(self.filename),
                        'md5_on_s3': mp_object.etag.replace('"', ''),
                        'size': mp_object.size,
                        'upload_type': 'multipart',
                        'test_op_code': 'create'
                    })
            upload_status = {'status': True}
            """
            # the following code is better than splitting the file,
            # but commenting this for now and going ahead with splitting the files

            chunk_count = int(math.ceil(file_size / float(chunk_size)))
            # Send the file parts, using FileChunkIO to create a file-like object
            # that points to a certain byte range within the original file. We
            # set bytes to never exceed the original file size
            for i in range(chunk_count):
                offset = chunk_size * i
                bytes = min(chunk_size, file_size - offset)
                with FileChunkIO(filename, 'r', offset=offset, bytes=bytes) as fp:
                    mp.upload_part_from_file(fp, part_num=i + 1)
            # Finish the upload
            """
        except (exception.BotoClientError, exception.S3ResponseError) as e:
            log.error(e)
            upload_status = {'status': False, 'msg': e}
        return upload_status
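
A sketch of how the break/resume mechanics above can be exercised: break the first upload at a part number, then construct a second MultipartPut against the same json state file to resume. The bucket object and paths are placeholders; iniate_multipart is the method name exactly as defined above.

mp = MultipartPut(bucket, '/tmp/bigfile')         # 'bucket' is an existing boto bucket
mp.break_at_part_no = 3                           # stop before uploading part 3
mp.iniate_multipart('/tmp/mp_state.json')         # fresh upload: splits file, writes state
mp.put()                                          # uploads parts 1-2, then breaks

mp_resume = MultipartPut(bucket, '/tmp/bigfile')
mp_resume.iniate_multipart('/tmp/mp_state.json')  # state file exists: resume path
mp_resume.put()                                   # uploads remaining parts, completes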
Example 30
class PutContentsFromFile(object):
    def __init__(self, key, json_file):
        log.debug('class: %s' % self.__class__.__name__)
        self.json_file = json_file
        self.jkey = JKeys(self.json_file)
        self.key = key
        self.add_io_info = AddIOInfo()

    def set_metadata(self, **metadata):
        log.debug('function: %s' % self.set_metadata.__name__)
        log.info('setting metadata %s' % metadata)
        metadata_name = list(metadata.keys())[0]
        metadata_value = list(metadata.values())[0]
        try:
            self.key.set_metadata(metadata_name, metadata_value)
            return True
        except (exception.BotoClientError, exception.S3ResponseError) as e:
            log.error(e)
            return False

    def put(self, filename, test_op_code='create'):
        """
        :param filename: filename, i.e. the full path to the file
        :return: dictionary, args:
                                1. status: True for a successful upload, False for a failed upload
                                2. msgs: error messages
        """
        log.debug('function: %s' % self.put.__name__)
        log.info('upload of file: %s' % filename)
        try:
            self.key.set_contents_from_filename(filename)
            md5_on_s3 = self.key.etag.replace('"', '')
            key_details = {
                'key_name': self.key.key,
                'size': os.stat(filename).st_size,
                'md5_local': utils.get_md5(filename),
                'md5_on_s3': md5_on_s3,
                'opcode': {
                    "edit": {
                        "new_md5": None
                    },
                    "move": {
                        "new_name": None
                    },
                    "delete": {
                        "deleted": None
                    }
                }
            }
            self.jkey.add(self.key.bucket.name, **key_details)
            self.add_io_info.add_keys_info(
                self.key.bucket.connection.access_key, self.key.bucket.name,
                **{
                    'key_name': self.key.key,
                    'size': os.stat(filename).st_size,
                    'md5_on_s3': md5_on_s3,
                    'upload_type': 'normal',
                    'test_op_code': test_op_code
                })
            upload_status = {'status': True}
        except (exception.BotoClientError, exception.S3ResponseError) as e:
            log.error(e)
            upload_status = {'status': False, 'msgs': e}
        return upload_status

    def get(self, filename):
        """
        :param filename: local filename to download the s3 contents into;
                can be different from the original filename
        :return: dictionary, args:
                                1. status: True for a successful download, False for a failed download
                                2. msgs: error messages
        """
        log.debug('function: %s' % self.get.__name__)
        log.info('getting the contents of file %s:' % self.key)
        log.info('download or get the file to filename: %s' % filename)
        try:
            self.key.get_contents_to_filename(filename)
            md5_on_s3 = self.key.etag.replace('"', '')
            md5_local = utils.get_md5(filename)
            if md5_on_s3 == md5_local:
                md5_match = "match"
            else:
                md5_match = "no match"
            key_details = {
                'key_name': os.path.basename(filename),
                'key_name_os_s3': self.key.name,
                'size': os.stat(filename).st_size,
                'md5_local': md5_local,
                'md5_on_s3': md5_on_s3,
                'md5_match': md5_match,
                'opcode': {
                    "edit": {
                        "new_md5": None
                    },
                    "move": {
                        "new_name": None
                    },
                    "delete": {
                        "deleted": None
                    }
                }
            }
            self.jkey.add(self.key.bucket.name, **key_details)
            download_status = {'status': True}
        except (exception.BotoClientError, exception.S3ResponseError,
                Exception) as e:
            log.error(e)
            download_status = {'status': False, 'msgs': e}
        return download_status
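
A round-trip usage sketch for PutContentsFromFile: upload a local file through a fresh boto key, then download it back and rely on the md5_match bookkeeping in get(). The bucket object, file paths, and json file name are placeholders; JKeys and utils are the suite's own helpers.

key = bucket.new_key('test-object-1')             # 'bucket' is an existing boto bucket
ops = PutContentsFromFile(key, 'keys.json')

upload = ops.put('/tmp/source.dat')
if upload['status']:
    download = ops.get('/tmp/downloaded.dat')     # md5s compared inside get()
    print('download ok: %s' % download['status'])
else:
    print('upload failed: %s' % upload['msgs'])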