Example No. 1
def test_bulk_operation_table_output_query(object_storage_client):
    create_bucket_request = oci.object_storage.models.CreateBucketDetails()
    create_bucket_request.name = 'ObjectStorageTableOutput_{}'.format(util.random_number_string())
    create_bucket_request.compartment_id = util.COMPARTMENT_ID
    util.clear_test_data(object_storage_client, util.NAMESPACE, util.COMPARTMENT_ID, create_bucket_request.name)
    object_storage_client.create_bucket(util.NAMESPACE, create_bucket_request)

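    # Table output filtered by a JMESPath --query: only the 'file' and 'opc-content-md5' columns should appear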
    result = invoke(['os', 'object', 'bulk-upload', '--namespace', util.NAMESPACE, '--bucket-name', create_bucket_request.name, '--src-dir', root_bulk_put_folder, '--output', 'table', '--query', "[?action=='Uploaded'].{file: file, \"opc-content-md5\": \"opc-content-md5\"}"])
    assert 'file' in result.output
    assert 'opc-content-md5' in result.output
    assert 'etag' not in result.output

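    # A dry-run bulk-delete with table output should include the default 'action' and 'object' columns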
    result = invoke(['os', 'object', 'bulk-delete', '--namespace', util.NAMESPACE, '--bucket-name', bulk_get_bucket_name, '--dry-run', '--output', 'table'])
    assert 'action' in result.output
    assert 'object' in result.output
    assert '/a/Object_1' in result.output

    result = invoke(['os', 'object', 'bulk-delete', '--namespace', util.NAMESPACE, '--bucket-name', bulk_get_bucket_name, '--dry-run', '--output', 'table', '--query', "[?object=='Object_0'][object]"])
    assert 'action' not in result.output
    assert '/a/Object_1' not in result.output
    assert 'Object_0' in result.output

    target_download_folder = os.path.join('tests', 'temp', create_bucket_request.name)
    result = invoke([
        'os', 'object', 'bulk-download',
        '--namespace', util.NAMESPACE,
        '--bucket-name', create_bucket_request.name,
        '--download-dir', target_download_folder,
        '--output', 'table',
    ])

    delete_bucket_and_all_items(object_storage_client, create_bucket_request.name)

    shutil.rmtree(target_download_folder)
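
The --query value above is a JMESPath expression, the same syntax the CLI documents for --query. A minimal sketch with made-up sample rows (assumes the jmespath package is installed) showing why 'etag' is absent from the table output:

import jmespath

rows = [
    {'action': 'Uploaded', 'file': 'a.txt', 'opc-content-md5': 'abc==', 'etag': 'e1'},
    {'action': 'Skipped', 'file': 'b.txt', 'opc-content-md5': 'def==', 'etag': 'e2'},
]
expression = "[?action=='Uploaded'].{file: file, \"opc-content-md5\": \"opc-content-md5\"}"
print(jmespath.search(expression, rows))
# [{'file': 'a.txt', 'opc-content-md5': 'abc=='}] -- the filter keeps only uploaded rows
# and the multiselect hash projects just the two requested columns.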
Example No. 2
    def get_compartment_to_rename(self):
        keep_paginating = True
        next_page = None
        while keep_paginating:
            if next_page:
                result = self.invoke(['compartment', 'list', '--compartment-id', util.TENANT_ID, '--limit', '1000', '--page', next_page])
            else:
                result = self.invoke(['compartment', 'list', '--compartment-id', util.TENANT_ID, '--limit', '1000'])

            parsed_result = json.loads(result.output)

            for item in parsed_result['data']:
                if item['name'].find(self.RENAME_COMPARTMENT_PREFIX) == 0:
                    return item

            if 'opc-next-page' in parsed_result:
                next_page = parsed_result['opc-next-page']
            keep_paginating = (next_page is not None)

        # If we're here, we need to create the compartment.
        # Could also create PythonCliCompartmentRenameTest-0 ahead of time and add privs
        # to the compartment via policy.
        result = self.invoke([
            'compartment', 'create',
            '--compartment-id', util.TENANT_ID,
            '--name', '{}{}'.format(self.RENAME_COMPARTMENT_PREFIX, util.random_number_string()),
            '--description', 'Compartment for CLI compartment rename testing',
            '--profile', 'ADMIN'
        ])
        parsed_result = json.loads(result.output)
        print('Created compartment: {}'.format(parsed_result['data']))

        return parsed_result['data']
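
The loop above is the standard opc-next-page pagination pattern. A minimal sketch (a hypothetical helper, not part of the test class) of the same pattern as a reusable generator, where invoke stands in for self.invoke:

import json

def iter_list_results(invoke, base_args):
    next_page = None
    while True:
        args = list(base_args) + (['--page', next_page] if next_page else [])
        parsed = json.loads(invoke(args).output)
        for item in parsed['data']:
            yield item
        next_page = parsed.get('opc-next-page')
        if next_page is None:
            break

# Usage mirroring the loop above:
# for item in iter_list_results(self.invoke, ['compartment', 'list', '--compartment-id', util.TENANT_ID, '--limit', '1000']):
#     ...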
Example No. 3
def test_bulk_put_with_multipart_params(object_storage_client):
    create_bucket_request = oci.object_storage.models.CreateBucketDetails()
    create_bucket_request.name = 'ObjectStorageBulkPutMultipartsTest_{}'.format(util.random_number_string())
    create_bucket_request.compartment_id = util.COMPARTMENT_ID
    util.clear_test_data(object_storage_client, util.NAMESPACE, util.COMPARTMENT_ID, create_bucket_request.name)
    object_storage_client.create_bucket(util.NAMESPACE, create_bucket_request)

    result = invoke([
        'os', 'object', 'bulk-upload',
        '--namespace', util.NAMESPACE,
        '--bucket-name', create_bucket_request.name,
        '--src-dir', root_bulk_put_folder,
        '--part-size', '10'
    ])
    parsed_result = parse_json_response_from_mixed_output(result.output)
    assert parsed_result['skipped-objects'] == []
    assert parsed_result['upload-failures'] == {}
    assert len(parsed_result['uploaded-objects']) == get_count_of_files_in_folder_and_subfolders(root_bulk_put_folder)

    result = invoke([
        'os', 'object', 'bulk-upload',
        '--namespace', util.NAMESPACE,
        '--bucket-name', create_bucket_request.name,
        '--src-dir', root_bulk_put_folder,
        '--no-multipart',
        '--overwrite'
    ])
    parsed_result = parse_json_response_from_mixed_output(result.output)
    assert parsed_result['skipped-objects'] == []
    assert parsed_result['upload-failures'] == {}
    assert len(parsed_result['uploaded-objects']) == get_count_of_files_in_folder_and_subfolders(root_bulk_put_folder)

    delete_bucket_and_all_items(object_storage_client, create_bucket_request.name)
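
parse_json_response_from_mixed_output is a helper from this test suite whose implementation is not shown in the excerpt. A minimal sketch (an assumption, not the repository's code) of one way such a helper can pull the trailing JSON document out of output that also contains progress lines:

import json

def parse_json_from_mixed_output(output):
    lines = output.splitlines()
    for i, line in enumerate(lines):
        if line.lstrip().startswith('{'):
            # Parse from the first line that opens the JSON document through to the end.
            return json.loads('\n'.join(lines[i:]))
    raise ValueError('no JSON document found in CLI output')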
Example No. 4
    def subtest_compartment_operations(self):
        # We don't want to call compartment create with every run, so just call help to
        # make sure the command is at least there.
        result = self.invoke([
            'compartment', 'create', '--help', '--cli-rc-file',
            os.path.join('tests', 'resources', 'default_files',
                         'use_click_help')
        ])
        self.validate_response(result, json_response_expected=False)

        result = self.invoke([
            'compartment', 'list', '--compartment-id', util.TENANT_ID,
            '--limit', '1000'
        ])
        self.validate_response(result)

        result = self.invoke(['compartment', 'list', '--limit', '1000'])
        self.validate_response(result)

        result = self.invoke(['compartment', 'list', '--include-root'])
        self.validate_response(result)

        result = self.invoke(
            ['compartment', 'list', '--include-root', '--limit', '10'])
        self.validate_response(result)
        self.assertEquals(len(json.loads(result.output)['data']), 10)

        result = self.invoke(
            ['compartment', 'list', '--include-root', '--limit', '1'])
        self.validate_response(result)
        tenant_id = "ocid1.tenancy.oc1..aaaaaaaa3vi3ft3yi3sq4nhiql4nvbzjz6gipbn72h7werl6njs6xsq4wgdq"
        self.assertEquals(tenant_id,
                          json.loads(result.output)['data'][0]['id'])
        self.assertEquals(len(json.loads(result.output)['data']), 1)

        result = self.invoke(
            ['compartment', 'list', '--all', '--include-root'])
        self.validate_response(result)

        result = self.invoke([
            'compartment', 'list', '--compartment-id', util.TENANT_ID,
            '--include-root'
        ])
        self.validate_response(result)

        result = self.invoke(
            ['compartment', 'get', '--compartment-id', util.COMPARTMENT_ID])
        self.validate_response(result, expect_etag=True)

        update_description = 'Compartment used by CLI integration tests. {}'.format(
            util.random_number_string())
        result = self.invoke([
            'compartment', 'update', '--compartment-id', util.COMPARTMENT_ID,
            '--description', update_description
        ])
        self.validate_response(result, expect_etag=True)
        self.assertEquals(update_description,
                          json.loads(result.output)['data']['description'])
Example No. 5
def test_delete_when_no_objects_in_bucket(vcr_fixture, object_storage_client):
    create_bucket_request = oci.object_storage.models.CreateBucketDetails()
    create_bucket_request.name = 'ObjectStorageBulkDelete_{}'.format(util.random_number_string())
    create_bucket_request.compartment_id = util.COMPARTMENT_ID
    object_storage_client.create_bucket(util.NAMESPACE, create_bucket_request)

    result = invoke(['os', 'object', 'bulk-delete', '--namespace', util.NAMESPACE, '--bucket-name', create_bucket_request.name])
    assert 'There are no objects to delete in {}'.format(create_bucket_request.name) in result.output

    delete_bucket_and_all_items(object_storage_client, create_bucket_request.name)
Example No. 6
    def subtest_compartment_rename(self):
        compartment_to_rename = self.get_compartment_to_rename()

        updated_name = '{}{}'.format(self.RENAME_COMPARTMENT_PREFIX, util.random_number_string())
        updated_description = 'Updated {}'.format(updated_name)
        result = self.invoke(['compartment', 'update', '--compartment-id', compartment_to_rename['id'], '--name', updated_name, '--description', updated_description])
        self.validate_response(result, expect_etag=True)

        parsed_result = json.loads(result.output)
        self.assertEquals(compartment_to_rename['id'], parsed_result['data']['id'])
        self.assertEquals(updated_description, parsed_result['data']['description'])
        self.assertEquals(updated_name, parsed_result['data']['name'])
Example No. 7
    def subtest_compartment_operations(self):
        # We don't want to call compartment create with every run, so just call help to
        # make sure the command is at least there.
        result = self.invoke(['compartment', 'create', '--help', '--cli-rc-file', os.path.join('tests', 'resources', 'default_files', 'use_click_help')])
        self.validate_response(result, json_response_expected=False)

        result = self.invoke(['compartment', 'list', '--compartment-id', util.TENANT_ID, '--limit', '1000'])
        self.validate_response(result)

        result = self.invoke(['compartment', 'list', '--limit', '1000'])
        self.validate_response(result)

        result = self.invoke(['compartment', 'get', '--compartment-id', util.COMPARTMENT_ID])
        self.validate_response(result, expect_etag=True)

        update_description = 'Compartment used by CLI integration tests. {}'.format(util.random_number_string())
        result = self.invoke(
            ['compartment', 'update', '--compartment-id', util.COMPARTMENT_ID, '--description', update_description])
        self.validate_response(result, expect_etag=True)
        self.assertEquals(update_description, json.loads(result.output)['data']['description'])
Example No. 8
def test_get_multipart(object_storage_client):
    create_bucket_request = oci.object_storage.models.CreateBucketDetails()
    create_bucket_request.name = 'ObjectStorageBulkGetMultipartsTest_{}'.format(util.random_number_string())
    create_bucket_request.compartment_id = util.COMPARTMENT_ID
    util.clear_test_data(object_storage_client, util.NAMESPACE, util.COMPARTMENT_ID, create_bucket_request.name)
    object_storage_client.create_bucket(util.NAMESPACE, create_bucket_request)

    large_file_root_dir = os.path.join('tests', 'temp', 'multipart_get_large_files')
    if not os.path.exists(large_file_root_dir):
        os.makedirs(large_file_root_dir)
    util.create_large_file(os.path.join(large_file_root_dir, '1.bin'), LARGE_CONTENT_FILE_SIZE_IN_MEBIBYTES)
    util.create_large_file(os.path.join(large_file_root_dir, '2.bin'), LARGE_CONTENT_FILE_SIZE_IN_MEBIBYTES)
    util.create_large_file(os.path.join(large_file_root_dir, '3.bin'), LARGE_CONTENT_FILE_SIZE_IN_MEBIBYTES)
    util.create_large_file(os.path.join(large_file_root_dir, '4.bin'), LARGE_CONTENT_FILE_SIZE_IN_MEBIBYTES)
    util.create_large_file(os.path.join(large_file_root_dir, '5.bin'), LARGE_CONTENT_FILE_SIZE_IN_MEBIBYTES)
    util.create_large_file(os.path.join(large_file_root_dir, '6.bin'), 1)  # Creates a 1 MiB file for variety

    invoke([
        'os', 'object', 'bulk-upload',
        '--namespace', util.NAMESPACE,
        '--bucket-name', create_bucket_request.name,
        '--src-dir', large_file_root_dir
    ])

    large_file_verify_dir = os.path.join('tests', 'temp', 'multipart_get_large_files_verify')

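    # Use a low --multipart-download-threshold so the large files are downloaded in multiple parts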
    invoke(['os', 'object', 'bulk-download', '--namespace', util.NAMESPACE, '--bucket-name', create_bucket_request.name, '--download-dir', large_file_verify_dir, '--multipart-download-threshold', '128'])

    assert get_count_of_files_in_folder_and_subfolders(large_file_verify_dir) == 6
    assert filecmp.cmp(os.path.join(large_file_root_dir, '1.bin'), os.path.join(large_file_verify_dir, '1.bin'))
    assert filecmp.cmp(os.path.join(large_file_root_dir, '2.bin'), os.path.join(large_file_verify_dir, '2.bin'))
    assert filecmp.cmp(os.path.join(large_file_root_dir, '3.bin'), os.path.join(large_file_verify_dir, '3.bin'))
    assert filecmp.cmp(os.path.join(large_file_root_dir, '4.bin'), os.path.join(large_file_verify_dir, '4.bin'))
    assert filecmp.cmp(os.path.join(large_file_root_dir, '5.bin'), os.path.join(large_file_verify_dir, '5.bin'))
    assert filecmp.cmp(os.path.join(large_file_root_dir, '6.bin'), os.path.join(large_file_verify_dir, '6.bin'))

    shutil.rmtree(large_file_root_dir)
    shutil.rmtree(large_file_verify_dir)

    delete_bucket_and_all_items(object_storage_client, create_bucket_request.name)
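
The six filecmp checks above can equally be written as a loop. A minimal sketch, assuming the same directory layout; shallow=False forces a byte-for-byte comparison instead of the default stat-based one:

import filecmp
import os

def assert_downloads_match(src_dir, dst_dir, file_names):
    # Compare each downloaded file against its original.
    for name in file_names:
        assert filecmp.cmp(os.path.join(src_dir, name),
                           os.path.join(dst_dir, name),
                           shallow=False), '{} differs after download'.format(name)

# assert_downloads_match(large_file_root_dir, large_file_verify_dir,
#                        ['{}.bin'.format(i) for i in range(1, 7)])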
Example No. 9
def generate_test_data(object_storage_client):
    global bulk_get_object_to_content, bulk_get_bucket_name, root_bulk_put_folder, bulk_put_large_files, bulk_put_mid_sized_files, bulk_put_bucket_name

    # Create a test bucket
    create_bucket_request = oci.object_storage.models.CreateBucketDetails()
    create_bucket_request.name = 'ObjectStorageBulkGetTest_{}'.format(util.random_number_string())
    create_bucket_request.compartment_id = util.COMPARTMENT_ID
    util.clear_test_data(object_storage_client, util.NAMESPACE, util.COMPARTMENT_ID, create_bucket_request.name)
    object_storage_client.create_bucket(util.NAMESPACE, create_bucket_request)

    bulk_get_bucket_name = create_bucket_request.name

    # Create items at various hierarchy levels (to be surfaced as different directories on disk)
    for i in range(OBJECTS_TO_CREATE_IN_BUCKET_FOR_BULK_GET):
        if i % 5 == 4:
            object_name = 'a/b/c/d/Object_{}'.format(i)
            bulk_get_prefix_to_object['a/b/c/d'].append(object_name)
        elif i % 5 == 3:
            object_name = 'a/b/c/Object_{}'.format(i)
            bulk_get_prefix_to_object['a/b/c'].append(object_name)
        elif i % 5 == 2:
            object_name = 'a/b/Object_{}'.format(i)
            bulk_get_prefix_to_object['a/b'].append(object_name)
        elif i % 5 == 1:
            # This is equivalent to a/ on the file system because we drop the leading slash (we drop path separators from the front to avoid unexpected results)
            object_name = '/a/Object_{}'.format(i)
            bulk_get_prefix_to_object['/a'].append(object_name)
        else:
            # At the root of the bucket
            object_name = 'Object_{}'.format(i)
            bulk_get_prefix_to_object[''].append(object_name)

        object_content = generate_random_string(CONTENT_STRING_LENGTH)
        object_storage_client.put_object(util.NAMESPACE, create_bucket_request.name, object_name, object_content)
        bulk_get_object_to_content[object_name] = object_content

    # makedirs creates all subfolders recursively
    root_bulk_put_folder = 'tests/temp/bulk_put_{}'.format(util.random_number_string())
    bulk_put_folder_leaf = '{}/subfolder1/subfolder2/subfolder3'.format(root_bulk_put_folder)
    if not os.path.exists(bulk_put_folder_leaf):
        os.makedirs(bulk_put_folder_leaf)

    create_bucket_request = oci.object_storage.models.CreateBucketDetails()
    create_bucket_request.name = 'ObjectStorageBulkPutTest_{}'.format(util.random_number_string())
    create_bucket_request.compartment_id = util.COMPARTMENT_ID
    util.clear_test_data(object_storage_client, util.NAMESPACE, util.COMPARTMENT_ID, create_bucket_request.name)
    object_storage_client.create_bucket(util.NAMESPACE, create_bucket_request)

    bulk_put_bucket_name = create_bucket_request.name

    subfolders = ['', 'subfolder1', 'subfolder1/subfolder2', 'subfolder1/subfolder2/subfolder3']
    for subfolder in subfolders:
        if subfolder == '':
            full_folder = root_bulk_put_folder
        else:
            full_folder = os.path.join(root_bulk_put_folder, subfolder)

        for i in range(OBJECTS_TO_CREATE_IN_FOLDER_FOR_BULK_PUT + 1):
            file_path = '{}/object_{}'.format(full_folder, i)
            if i != 0 and i % OBJECTS_TO_CREATE_IN_FOLDER_FOR_BULK_PUT == 0:
                # Put in one big file per subfolder
                util.create_large_file(file_path, LARGE_CONTENT_FILE_SIZE_IN_MEBIBYTES)
                bulk_put_large_files.add(file_path)
            elif i != 0 and i % 10 == 0:
                # Put in the occasional file with a reasonable size so that we can force multipart
                util.create_large_file(file_path, MID_SIZED_FILE_IN_MEBIBTYES)
                bulk_put_mid_sized_files.add(file_path)
            else:
                with open(file_path, 'w') as f:
                    f.write(generate_random_string(CONTENT_STRING_LENGTH))

    yield

    # Tear down: delete every object and then delete the buckets themselves
    delete_bucket_and_all_items(object_storage_client, bulk_get_bucket_name)
    delete_bucket_and_all_items(object_storage_client, bulk_put_bucket_name)

    # Remove all directories recursively
    shutil.rmtree(root_bulk_put_folder)
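
The yield in the middle of generate_test_data suggests it is registered as a pytest yield-fixture (the decorator is not shown in this excerpt): everything before the yield is setup, and everything after it runs as teardown once the dependent tests finish. A minimal, self-contained sketch of that pattern, assuming pytest:

import shutil
import tempfile

import pytest

@pytest.fixture(scope='module')
def temp_workspace():
    path = tempfile.mkdtemp()   # setup: runs before the first test that requests the fixture
    yield path                  # tests receive `path` here
    shutil.rmtree(path)         # teardown: runs after the last test in the module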
Example No. 10
def apply_tags_to_tag_namespace(tag_namespace_id):
    tag_data_container.ensure_namespace_and_tags_active(invoke)

    tag_names_to_values = {
        tag_data_container.tags[0].name: 'tag_ns_mgmt {}'.format(util.random_number_string())
    }
    tag_data_container.write_defined_tags_to_file(
        os.path.join('tests', 'temp', 'defined_tags_mgmt.json'),
        tag_data_container.tag_namespace,
        tag_names_to_values
    )

    # Apply tags
    expected_freeform = {'tagOne': 'value three'}
    expected_defined = {tag_data_container.tag_namespace.name: tag_names_to_values}
    result = invoke([
        'iam', 'tag-namespace', 'update',
        '--tag-namespace-id', tag_namespace_id,
        '--freeform-tags', 'file://tests/resources/tagging/freeform_tags_2.json',
        '--defined-tags', 'file://tests/temp/defined_tags_mgmt.json',
        '--force'
    ])
    util.validate_response(result)
    parsed_result = json.loads(result.output)
    assert expected_freeform == parsed_result['data']['freeform-tags']
    assert expected_defined == parsed_result['data']['defined-tags']

    result = invoke(['iam', 'tag-namespace', 'get', '--tag-namespace-id', tag_namespace_id])
    util.validate_response(result)
    parsed_result = json.loads(result.output)
    assert expected_freeform == parsed_result['data']['freeform-tags']
    assert expected_defined == parsed_result['data']['defined-tags']

    result = invoke(['iam', 'tag-namespace', 'list', '-c', util.COMPARTMENT_ID, '--all'])
    parsed_result = json.loads(result.output)
    found_namespace = False
    for pr in parsed_result['data']:
        if pr['id'] == tag_namespace_id:
            assert expected_freeform == pr['freeform-tags']
            assert expected_defined == pr['defined-tags']
            found_namespace = True
            break
    assert found_namespace

    # Overwrite with different tags
    tag_names_to_values = {
        tag_data_container.tags[1].name: 'tag_ns_mgmt update {}'.format(util.random_number_string())
    }
    tag_data_container.write_defined_tags_to_file(
        os.path.join('tests', 'temp', 'defined_tags_mgmt.json'),
        tag_data_container.tag_namespace,
        tag_names_to_values
    )
    expected_freeform = {'tagOne': 'value1', 'tag_Two': 'value two'}
    expected_defined = {tag_data_container.tag_namespace.name: tag_names_to_values}
    result = invoke([
        'iam', 'tag-namespace', 'update',
        '--tag-namespace-id', tag_namespace_id,
        '--freeform-tags', 'file://tests/resources/tagging/freeform_tags_1.json',
        '--defined-tags', 'file://tests/temp/defined_tags_mgmt.json',
        '--force'
    ])
    util.validate_response(result)
    parsed_result = json.loads(result.output)
    assert expected_freeform == parsed_result['data']['freeform-tags']
    assert expected_defined == parsed_result['data']['defined-tags']

    result = invoke(['iam', 'tag-namespace', 'get', '--tag-namespace-id', tag_namespace_id])
    util.validate_response(result)
    parsed_result = json.loads(result.output)
    assert expected_freeform == parsed_result['data']['freeform-tags']
    assert expected_defined == parsed_result['data']['defined-tags']

    # Clear tags
    result = invoke([
        'iam', 'tag-namespace', 'update',
        '--tag-namespace-id', tag_namespace_id,
        '--freeform-tags', '{}',
        '--defined-tags', '{}',
        '--force'
    ])
    util.validate_response(result)
    parsed_result = json.loads(result.output)
    assert {} == parsed_result['data']['freeform-tags']
    assert {} == parsed_result['data']['defined-tags']

    result = invoke(['iam', 'tag-namespace', 'get', '--tag-namespace-id', tag_namespace_id])
    util.validate_response(result)
    parsed_result = json.loads(result.output)
    assert {} == parsed_result['data']['freeform-tags']
    assert {} == parsed_result['data']['defined-tags']

    result = invoke(['iam', 'tag-namespace', 'list', '-c', util.COMPARTMENT_ID, '--all'])
    parsed_result = json.loads(result.output)
    found_namespace = False
    for pr in parsed_result['data']:
        if pr['id'] == tag_namespace_id:
            assert {} == pr['freeform-tags']
            assert {} == pr['defined-tags']
            found_namespace = True
            break
    assert found_namespace
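
The assertions above imply the shape of the JSON passed via file://: freeform tags are a flat string-to-string map, while defined tags nest one level deeper under the tag namespace name. A minimal sketch with illustrative names and values (not the contents of the repository's fixture files):

import json

freeform_tags = {'tagOne': 'value three'}                      # flat key/value map
defined_tags = {'example_tag_ns': {'example_tag': 'a value'}}  # namespace -> tag name -> value (names are hypothetical)

with open('defined_tags_example.json', 'w') as f:
    json.dump(defined_tags, f)
# Passed to the CLI as: --defined-tags file://defined_tags_example.json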