def test_skip_versioning(self):
    """Checks that --skip_versioning keeps versioning_enabled as None."""
    self.parsed_args.skip_versioning = True
    src = self.source_bucket
    details = bucket_details.BucketDetails(conf=self.parsed_args,
                                           source_bucket=src)

    # Every other property is still copied from the args / source bucket.
    self.assertEqual(self.parsed_args.location, details.location)
    self.assertEqual(self.parsed_args.storage_class, details.storage_class)
    self.assertEqual(src.get_iam_policy(), details.iam_policy)
    self.assertEqual(src.acl.get_entities(), details.acl_entities)
    self.assertEqual(src.default_object_acl.get_entities(),
                     details.default_obj_acl_entities)
    self.assertEqual(src.requester_pays, details.requester_pays)
    self.assertEqual(src.cors, details.cors)
    self.assertEqual(src.default_kms_key_name, details.default_kms_key_name)
    self.assertEqual(src.labels, details.labels)
    self.assertEqual(src.lifecycle_rules, details.lifecycle_rules)
    self.assertEqual(src.get_logging(), details.logging)
    self.assertIsNone(details.versioning_enabled)
    self.assertEqual(src.list_notifications(), details.notifications)

    # A skipped property must also ignore attempts to overwrite it.
    details.versioning_enabled = 'test'
    self.assertIsNone(details.versioning_enabled)
# Example #2
def main(config, parsed_args, cloud_logger):
    """Main entry point for the bucket mover tool

    Args:
        config: A Configuration object with all of the config values needed for the script to run
        parsed_args: the configargparser parsing of command line options
        cloud_logger: A GCP logging client instance
    """
    cloud_logger.log_text("Starting GCS Bucket Mover")
    _print_config_details(cloud_logger, config)

    src_bucket = config.source_storage_client.lookup_bucket(  # pylint: disable=no-member
        config.bucket_name)

    # Snapshot the source bucket's IAM policy, ACLs and settings up front so
    # they can be replayed onto the bucket created in the target project.
    src_details = bucket_details.BucketDetails(conf=parsed_args,
                                               source_bucket=src_bucket)

    _check_bucket_lock(cloud_logger, config, src_bucket, src_details)

    # Client for the Storage Transfer Service, built with target-project creds.
    sts_client = discovery.build('storagetransfer', 'v1',
                                 credentials=config.target_project_credentials)

    # Dispatch on the requested operation.
    if config.is_rename:
        _rename_bucket(cloud_logger, config, src_bucket, src_details,
                       sts_client)
    else:
        _move_bucket(cloud_logger, config, src_bucket, src_details, sts_client)

    cloud_logger.log_text('Completed GCS Bucket Mover')
    def test_create_bucket_default_obj_acl_skip(
            self, mock_update_notifications, mock_update_iam_policies,
            mock_update_acl_entities, mock_create_bucket_api_call,
            mock_add_target_project_to_kms_key, mock_bucket):
        """Tests that the create bucket happens without the notifications set"""
        # NOTE(review): the name mentions default_obj_acl, but the body
        # exercises skipped notifications — confirm intent against history.
        self.parsed_args = common.get_mock_args()
        self.source_bucket = common.get_mock_source_bucket()
        info = bucket_details.BucketDetails(conf=self.parsed_args,
                                            source_bucket=self.source_bucket)
        # No notifications recorded -> copying them should be skipped.
        info.notifications = None

        fake_bucket = mock.MagicMock()
        mock_bucket.return_value = fake_bucket

        bucket_mover_service._create_bucket(mock.MagicMock(), mock.MagicMock(),
                                            mock.MagicMock(), mock.MagicMock(),
                                            info)

        # KMS key, logging and the bucket-creation API call all still happen.
        self.assertEqual(info.default_kms_key_name,
                         fake_bucket.default_kms_key_name)
        mock_add_target_project_to_kms_key.assert_called_once()
        fake_bucket.enable_logging.assert_called_once_with(
            info.logging['logBucket'], info.logging['logObjectPrefix'])
        mock_create_bucket_api_call.assert_called_once()
        mock_update_iam_policies.assert_called_once()
        # Entity updates run twice: bucket ACLs and default object ACLs.
        self.assertEqual(2, mock_update_acl_entities.call_count)
        fake_bucket.acl.save.assert_called_once()
        fake_bucket.default_object_acl.save.assert_called_once()
        mock_update_notifications.assert_not_called()
def move_bucket(config, parsed_args, cloud_logger):
    """Main entry point for the bucket mover script

    Args:
        config: A Configuration object with all of the config values needed for the script to run
        parsed_args: the configargparser parsing of command line options
        cloud_logger: A GCP logging client instance
    """
    cloud_logger.log_text("Starting GCS Bucket Mover")

    _print_and_log(
        cloud_logger,
        'Using the following service accounts for GCS credentials: ')
    _print_and_log(cloud_logger, 'Source Project - {}'.format(
        config.source_project_credentials.service_account_email))  # pylint: disable=no-member
    _print_and_log(cloud_logger, 'Target Project - {}'.format(
        config.target_project_credentials.service_account_email))  # pylint: disable=no-member

    src_bucket = config.source_storage_client.lookup_bucket(  # pylint: disable=no-member
        config.bucket_name)

    # Snapshot the source bucket's IAM, ACLs and settings so they can be
    # replayed onto the buckets created in the target project.
    details = bucket_details.BucketDetails(conf=parsed_args,
                                           source_bucket=src_bucket)

    _check_bucket_lock(cloud_logger, config, src_bucket)
    temp_bucket = _create_temp_target_bucket(cloud_logger, config, details)

    # Storage Transfer Service client, built with target-project credentials.
    sts = discovery.build('storagetransfer', 'v1',
                          credentials=config.target_project_credentials)

    sts_email = _assign_sts_permissions(cloud_logger, sts, config, temp_bucket)
    # Copy 1: original bucket -> temp bucket in the target project.
    _run_and_wait_for_sts_job(sts, config.target_project, config.bucket_name,
                              config.temp_bucket_name, cloud_logger)

    _delete_empty_source_bucket(cloud_logger, src_bucket)
    _recreate_source_bucket(cloud_logger, config, details)
    _assign_sts_permissions_to_new_bucket(cloud_logger, sts_email, config)
    # Copy 2: temp bucket -> re-created bucket with the original name.
    _run_and_wait_for_sts_job(sts, config.target_project,
                              config.temp_bucket_name, config.bucket_name,
                              cloud_logger)

    _delete_empty_temp_bucket(cloud_logger, temp_bucket)
    _remove_sts_permissions(cloud_logger, sts_email, config)
    cloud_logger.log_text('Completed GCS Bucket Mover')
    def test_default_constructor(self):
        """Tests the default object is successfully created."""
        details = bucket_details.BucketDetails(
            conf=self.parsed_args, source_bucket=self.source_bucket)
        src = self.source_bucket

        # Location/storage class come from the parsed args; everything else is
        # copied off the source bucket.
        self.assertEqual(self.parsed_args.location, details.location)
        self.assertEqual(self.parsed_args.storage_class, details.storage_class)
        self.assertEqual(src.get_iam_policy(), details.iam_policy)
        self.assertEqual(src.acl.get_entities(), details.acl_entities)
        self.assertEqual(src.default_object_acl.get_entities(),
                         details.default_obj_acl_entities)
        self.assertEqual(src.requester_pays, details.requester_pays)
        self.assertEqual(src.cors, details.cors)
        self.assertEqual(src.default_kms_key_name, details.default_kms_key_name)
        self.assertEqual(src.labels, details.labels)
        self.assertEqual(src.lifecycle_rules, details.lifecycle_rules)
        self.assertEqual(src.get_logging(), details.logging)
        self.assertEqual(src.versioning_enabled, details.versioning_enabled)
        self.assertEqual(src.list_notifications(), details.notifications)

        # With no skip flags, every bucket property can be overridden.
        overridable = ('iam_policy', 'acl_entities', 'default_obj_acl_entities',
                       'requester_pays', 'cors', 'default_kms_key_name',
                       'labels', 'lifecycle_rules', 'logging',
                       'versioning_enabled', 'notifications')
        value = 'test'
        for attr in overridable:
            setattr(details, attr, value)
        for attr in overridable:
            self.assertEqual(value, getattr(details, attr))
    def test_skip_everything(self):
        """Tests the object constructor when the skip_everything flag is True."""
        self.parsed_args.skip_everything = True
        details = bucket_details.BucketDetails(
            conf=self.parsed_args, source_bucket=self.source_bucket)

        # Properties that should be left unset when everything is skipped.
        none_attrs = ('iam_policy', 'acl_entities', 'default_obj_acl_entities',
                      'requester_pays', 'cors', 'default_kms_key_name',
                      'lifecycle_rules', 'logging', 'versioning_enabled')

        # Location/storage class still come from the args; the rest is empty.
        self.assertEqual(self.parsed_args.location, details.location)
        self.assertEqual(self.parsed_args.storage_class, details.storage_class)
        for attr in none_attrs:
            self.assertIsNone(getattr(details, attr))
        self.assertEqual(details.labels, {})
        self.assertListEqual(details.notifications, [])

        # Attempted overrides must be ignored when everything is skipped.
        value = 'test'
        for attr in none_attrs + ('labels', 'notifications'):
            setattr(details, attr, value)
        for attr in none_attrs:
            self.assertIsNone(getattr(details, attr))
        self.assertEqual(details.labels, {})
        self.assertListEqual(details.notifications, [])
# Example #7
def move_bucket(conf):
    """Main entry point for the bucket mover script

    Args:
        conf: the argparser parsing of command line options
    """
    # Build the configuration (and its storage clients) from config.sh values.
    cfg = configuration.Configuration.from_conf(conf)

    print('Using the following service accounts for GCS credentials: ')
    print('Source Project - {}'.format(
        cfg.source_project_credentials.service_account_email))  # pylint: disable=no-member
    print('Target Project - {}\n'.format(
        cfg.target_project_credentials.service_account_email))  # pylint: disable=no-member

    # Snapshot the source bucket's IAM, ACLs and settings so they can be
    # replayed onto the buckets created in the target project.
    src_bucket = cfg.source_storage_client.lookup_bucket(  # pylint: disable=no-member
        cfg.bucket_name)
    details = bucket_details.BucketDetails(conf=conf, source_bucket=src_bucket)

    with yaspin(text='Create temp target bucket') as sp:
        temp_bucket = _create_bucket(sp, cfg, cfg.target_storage_client,
                                     cfg.temp_bucket_name, details)
        sp.write('{} Bucket {} created in target project {}'.format(
            _CHECKMARK, cfg.temp_bucket_name, cfg.target_project))

    # Storage Transfer Service client, built with target-project credentials.
    sts = discovery.build('storagetransfer', 'v1',
                          credentials=cfg.target_project_credentials)

    with yaspin(text='Assign STS permissions to source/temp buckets') as sp:
        sts_email = _get_sts_iam_account_email(sts, cfg.target_project)
        sp.write('\nSTS service account for IAM usage: {}'.format(sts_email))
        _assign_sts_iam_roles(sts_email, cfg.source_storage_client,
                              cfg.source_project, cfg.bucket_name, True)
        _assign_sts_iam_roles(sts_email, cfg.target_storage_client,
                              cfg.target_project, temp_bucket.name, True)
        sp.ok(_CHECKMARK)

    # Copy 1: original bucket -> temp bucket in the target project.
    _run_and_wait_for_sts_job(sts, cfg.target_project, cfg.bucket_name,
                              cfg.temp_bucket_name)

    with yaspin(text='Delete empty source bucket') as sp:
        src_bucket.delete()
        sp.ok(_CHECKMARK)

    with yaspin(text='Re-create source bucket in target project') as sp:
        _create_bucket(sp, cfg, cfg.target_storage_client, cfg.bucket_name,
                       details)
        sp.ok(_CHECKMARK)

    with yaspin(text='Assign STS permissions to new source bucket') as sp:
        _assign_sts_iam_roles(sts_email, cfg.target_storage_client,
                              cfg.target_project, cfg.bucket_name, False)
        sp.ok(_CHECKMARK)

    # Copy 2: temp bucket -> re-created bucket with the original name.
    _run_and_wait_for_sts_job(sts, cfg.target_project, cfg.temp_bucket_name,
                              cfg.bucket_name)

    with yaspin(text='Delete empty temp bucket') as sp:
        temp_bucket.delete()
        sp.ok(_CHECKMARK)

    with yaspin(text='Remove STS permissions from new source bucket') as sp:
        _remove_sts_iam_roles(sts_email, cfg.target_storage_client,
                              cfg.bucket_name)
        sp.ok(_CHECKMARK)