def testDagsExportWildcardSource(self, use_gsutil, isdir_mock, exec_mock):
  """Exporting with no SOURCE copies the entire dags folder via wildcard."""
  self.ExpectEnvironmentGet(
      self.TEST_PROJECT,
      self.TEST_LOCATION,
      self.TEST_ENVIRONMENT_ID,
      response=self.MakeEnvironmentWithBucket())
  export_dest = 'destdir'
  gsutil_exec = None
  if use_gsutil:
    self._SetUpGsutil()
    gsutil_exec = kubectl_util.FakeExec()
    exec_mock.side_effect = gsutil_exec
    # With no SOURCE, the copy source is the whole dags subdir ('dags/*').
    expected_args = [
        '-m', 'cp', '-r',
        posixpath.join(self.test_gcs_bucket_path, 'dags', '*'), export_dest
    ]
    gsutil_exec.AddCallback(0, self.MakeGsutilExecCallback(expected_args))
  else:
    self._SetUpStorageApi()
  self.RunEnvironments('storage', 'dags', 'export', '--project',
                       self.TEST_PROJECT, '--location', self.TEST_LOCATION,
                       '--environment', self.TEST_ENVIRONMENT_ID,
                       '--destination', export_dest)
  if use_gsutil:
    gsutil_exec.Verify()
  else:
    self.export_mock.assert_called_once_with(
        storage_util.BucketReference(self.test_gcs_bucket), 'dags/*',
        export_dest)
def testDagsDeleteTargetNotSpecified(self, use_gsutil, exec_mock):
  """Tests successful deletion of the entire DAGs directory."""
  # The command first fetches the environment to learn its backing bucket.
  self.ExpectEnvironmentGet(self.TEST_PROJECT, self.TEST_LOCATION,
                            self.TEST_ENVIRONMENT_ID,
                            response=self.MakeEnvironmentWithBucket())
  # The 'dags/' placeholder object is found on the post-delete check, so no
  # re-creation insert is expected (contrast with the restore test).
  subdir_ref = storage_util.ObjectReference(self.test_gcs_bucket, 'dags/')
  self.ExpectObjectGet(subdir_ref)
  if use_gsutil:
    # gsutil path: expect a single `gsutil -m rm -r gs://.../dags/*` call.
    self._SetUpGsutil()
    fake_exec = kubectl_util.FakeExec()
    exec_mock.side_effect = fake_exec
    fake_exec.AddCallback(
        0,
        self.MakeGsutilExecCallback([
            '-m', 'rm', '-r', '{}/dags/*'.format(self.test_gcs_bucket_path)
        ]))
  else:
    # Storage API path: deletion goes through the mocked storage client.
    self._SetUpStorageApi()
  # Note: no positional TARGET argument, so the whole directory is deleted.
  self.RunEnvironments('storage', 'dags', 'delete', '--project',
                       self.TEST_PROJECT, '--location', self.TEST_LOCATION,
                       '--environment', self.TEST_ENVIRONMENT_ID)
  if use_gsutil:
    fake_exec.Verify()
  else:
    # The missing target maps to '*' within the 'dags' subdir.
    self.delete_mock.assert_called_once_with(
        storage_util.BucketReference(self.test_gcs_bucket), '*', 'dags')
def testPluginsExport(self, use_gsutil, isdir_mock, exec_mock):
  """A single plugins file exports correctly via gsutil or the storage API."""
  self.ExpectEnvironmentGet(self.TEST_PROJECT, self.TEST_LOCATION,
                            self.TEST_ENVIRONMENT_ID,
                            response=self.MakeEnvironmentWithBucket())
  src = 'subdir/file.txt'
  dst = 'destdir'
  gsutil_exec = None
  if use_gsutil:
    self._SetUpGsutil()
    gsutil_exec = kubectl_util.FakeExec()
    exec_mock.side_effect = gsutil_exec
    expected_args = [
        '-m', 'cp', '-r',
        posixpath.join(self.test_gcs_bucket_path, 'plugins', src), dst
    ]
    gsutil_exec.AddCallback(0, self.MakeGsutilExecCallback(expected_args))
  else:
    self._SetUpStorageApi()
  self.RunEnvironments('storage', 'plugins', 'export', '--project',
                       self.TEST_PROJECT, '--location', self.TEST_LOCATION,
                       '--environment', self.TEST_ENVIRONMENT_ID, '--source',
                       src, '--destination', dst)
  if use_gsutil:
    gsutil_exec.Verify()
  else:
    self.export_mock.assert_called_once_with(
        storage_util.BucketReference(self.test_gcs_bucket),
        posixpath.join('plugins', src), dst)
def testDataDeleteRestoresSubdir(self, use_gsutil, exec_mock):
  """Tests that the data dir is restored if it's missing after deletion."""
  self.ExpectEnvironmentGet(
      self.TEST_PROJECT,
      self.TEST_LOCATION,
      self.TEST_ENVIRONMENT_ID,
      response=self.MakeEnvironmentWithBucket())
  # The post-delete existence check for the 'data/' placeholder object
  # returns 404, so the command is expected to re-create it via insert.
  subdir_ref = storage_util.ObjectReference(self.test_gcs_bucket, 'data/')
  self.ExpectObjectGet(subdir_ref,
                       exception=http_error.MakeHttpError(code=404))
  self.ExpectObjectInsert(subdir_ref)
  if use_gsutil:
    # gsutil path: deletion is a single `gsutil -m rm -r gs://.../data/*`.
    self._SetUpGsutil()
    fake_exec = kubectl_util.FakeExec()
    exec_mock.side_effect = fake_exec
    fake_exec.AddCallback(
        0,
        self.MakeGsutilExecCallback(
            ['-m', 'rm', '-r',
             '{}/data/*'.format(self.test_gcs_bucket_path)]))
  else:
    # Storage API path: deletion goes through the mocked storage client.
    self._SetUpStorageApi()
  self.RunEnvironments('storage', 'data', 'delete', '--project',
                       self.TEST_PROJECT, '--location', self.TEST_LOCATION,
                       '--environment', self.TEST_ENVIRONMENT_ID)
  if use_gsutil:
    fake_exec.Verify()
  else:
    # No explicit target: everything ('*') under 'data' is deleted.
    self.delete_mock.assert_called_once_with(
        storage_util.BucketReference(self.test_gcs_bucket), '*', 'data')
def ValidateBucketForCertificateAuthority(bucket_name):
  """Validates that a user-specified bucket can be used with a Private CA.

  Args:
    bucket_name: The name of the GCS bucket to validate.

  Returns:
    A BucketReference wrapping the given bucket name.

  Raises:
    InvalidArgumentException: when the given bucket can't be used with a CA.
  """
  messages = storage_util.GetMessages()
  client = storage_api.StorageClient(messages=messages)
  # Keep the try body minimal: only the GetBucket call can raise
  # BucketNotFoundError, so the ACL check and return stay outside it.
  try:
    # The 'full' projection is needed so the response includes ACL fields.
    bucket = client.GetBucket(
        bucket_name,
        messages.StorageBucketsGetRequest.ProjectionValueValuesEnum.full)
  except storage_api.BucketNotFoundError:
    raise exceptions.InvalidArgumentException(
        'gcs-bucket', 'The given bucket does not exist.')
  if not _BucketAllowsPublicObjectReads(bucket):
    # Show a warning but don't fail, since this could be intentional.
    log.warning(
        'The specified bucket does not publicly expose new objects by '
        'default, so some clients may not be able to access the CA '
        'certificate or CRLs. For more details, see '
        'https://cloud.google.com/storage/docs/access-control/making-data-public'
    )
  return storage_util.BucketReference(bucket_name)
def testDagsImport(self, use_gsutil, exec_mock):
  """A local file imports into the dags folder via gsutil or the API."""
  self.ExpectEnvironmentGet(self.TEST_PROJECT, self.TEST_LOCATION,
                            self.TEST_ENVIRONMENT_ID,
                            response=self.MakeEnvironmentWithBucket())
  src = 'subdir/file.txt'
  gsutil_exec = None
  if use_gsutil:
    self._SetUpGsutil()
    gsutil_exec = kubectl_util.FakeExec()
    exec_mock.side_effect = gsutil_exec
    # The import copies the local source into gs://<bucket>/dags/.
    gsutil_exec.AddCallback(
        0,
        self.MakeGsutilExecCallback(
            ['-m', 'cp', '-r', src, self.test_gcs_bucket_path + '/dags/']))
  else:
    self._SetUpStorageApi()
  self.RunEnvironments('storage', 'dags', 'import', '--project',
                       self.TEST_PROJECT, '--location', self.TEST_LOCATION,
                       '--environment', self.TEST_ENVIRONMENT_ID, '--source',
                       src)
  if use_gsutil:
    gsutil_exec.Verify()
  else:
    self.import_mock.assert_called_once_with(
        storage_util.BucketReference(self.test_gcs_bucket), src, 'dags/')
def Run(self, args):
  """Removes the Dataplex service agent's IAM binding from one resource.

  The resource arguments are checked in order (storage bucket, BigQuery
  dataset, project) and only the first one specified is handled; returns
  None when none of them is specified.
  """
  lake_ref = args.CONCEPTS.project.Parse()
  # Dataplex service agent address for the lake's project:
  # service-<PROJECT_NUMBER>@gcp-sa-dataplex.iam.gserviceaccount.com
  service_account = 'service-' + str(
      project_util.GetProjectNumber(lake_ref.projectsId)
  ) + '@gcp-sa-dataplex.iam.gserviceaccount.com'
  if args.IsSpecified('storage_bucket_resource'):
    return lake.RemoveServiceAccountFromBucketPolicy(
        storage_util.BucketReference(args.storage_bucket_resource),
        'serviceAccount:' + service_account, 'roles/dataplex.serviceAgent')
  if args.IsSpecified('bigquery_dataset_resource'):
    # Fetch the dataset, remove the service account from its policy
    # (presumably mutating `dataset` in place -- the same object is then
    # patched back; verify against lake.RemoveServiceAccountFromDatasetPolicy).
    get_dataset_request = apis.GetMessagesModule(
        'bigquery', 'v2').BigqueryDatasetsGetRequest(
            datasetId=args.bigquery_dataset_resource,
            projectId=args.secondary_project)
    dataset = apis.GetClientInstance(
        'bigquery', 'v2').datasets.Get(request=get_dataset_request)
    lake.RemoveServiceAccountFromDatasetPolicy(
        dataset, service_account, 'roles/dataplex.serviceAgent')
    return apis.GetClientInstance('bigquery', 'v2').datasets.Patch(
        apis.GetMessagesModule(
            'bigquery', 'v2').BigqueryDatasetsPatchRequest(
                datasetId=args.bigquery_dataset_resource,
                projectId=args.secondary_project,
                dataset=dataset))
  if args.IsSpecified('project_resource'):
    return projects_api.RemoveIamPolicyBinding(
        project_util.ParseProject(args.project_resource),
        'serviceAccount:' + service_account, 'roles/dataplex.serviceAgent')
def testPluginsDeleteTargetSpecified(self, use_gsutil, exec_mock):
  """Deleting a single named plugins file succeeds."""
  self.ExpectEnvironmentGet(
      self.TEST_PROJECT,
      self.TEST_LOCATION,
      self.TEST_ENVIRONMENT_ID,
      response=self.MakeEnvironmentWithBucket())
  # The 'plugins/' placeholder object is found, so no re-creation occurs.
  placeholder_ref = storage_util.ObjectReference(self.test_gcs_bucket,
                                                 'plugins/')
  self.ExpectObjectGet(placeholder_ref)
  file_target = 'subdir/file.txt'
  gsutil_exec = None
  if use_gsutil:
    self._SetUpGsutil()
    gsutil_exec = kubectl_util.FakeExec()
    exec_mock.side_effect = gsutil_exec
    gsutil_exec.AddCallback(
        0,
        self.MakeGsutilExecCallback([
            '-m', 'rm', '-r',
            '{}/plugins/{}'.format(self.test_gcs_bucket_path, file_target)
        ]))
  else:
    self._SetUpStorageApi()
  self.RunEnvironments('storage', 'plugins', 'delete', '--project',
                       self.TEST_PROJECT, '--location', self.TEST_LOCATION,
                       '--environment', self.TEST_ENVIRONMENT_ID, file_target)
  if use_gsutil:
    gsutil_exec.Verify()
  else:
    self.delete_mock.assert_called_once_with(
        storage_util.BucketReference(self.test_gcs_bucket), file_target,
        'plugins')
def SetUp(self):
  """Installs a mocked storage v1 client plus shared message/bucket fixtures."""
  mocked_client = api_mock.Client(core_apis.GetClientClass('storage', 'v1'))
  mocked_client.Mock()
  # Keep a handle on the mock for tests, and guarantee it is torn down.
  self.mocked_storage_v1 = mocked_client
  self.addCleanup(mocked_client.Unmock)
  self.storage_v1_messages = core_apis.GetMessagesModule('storage', 'v1')
  self.bucket_reference = storage_util.BucketReference(self._BUCKET_NAME)
def testValidateBucketForCaWithoutPublicAclsPrintsWarning(self):
  """Validation succeeds but logs a warning when the bucket isn't public."""
  bucket_name = 'foo'
  # Respond with an empty Bucket message: no defaultObjectAcl entries, so
  # newly written objects would not be publicly readable by default.
  self.client.buckets.Get.Expect(
      request=self.messages.StorageBucketsGetRequest(
          bucket=bucket_name,
          projection=self.messages.StorageBucketsGetRequest.
          ProjectionValueValuesEnum.full),
      response=self.messages.Bucket())
  # A usable reference is still returned despite the warning.
  self.assertEqual(
      storage.ValidateBucketForCertificateAuthority(bucket_name),
      storage_util.BucketReference(bucket_name))
  self.AssertLogContains('does not publicly expose new objects by default')
def testValidateBucketForCaWithPublicAclsPrintsNothing(self):
  """A bucket whose default object ACL grants allUsers READER logs nothing."""
  name = 'foo'
  public_acl = [
      self.messages.ObjectAccessControl(entity='allUsers', role='READER')
  ]
  self.client.buckets.Get.Expect(
      request=self.messages.StorageBucketsGetRequest(
          bucket=name,
          projection=self.messages.StorageBucketsGetRequest
          .ProjectionValueValuesEnum.full),
      response=self.messages.Bucket(defaultObjectAcl=public_acl))
  self.assertEqual(
      storage.ValidateBucketForCertificateAuthority(name),
      storage_util.BucketReference(name))
  self.AssertLogEquals('')
def CreateBucketForCertificateAuthority(ca_ref):
  """Provisions a new GCS bucket for the given Certificate Authority."""
  client = storage_util.GetClient()
  messages = storage_util.GetMessages()
  # The CA's parent resource name is used as the bucket location, and its
  # grandparent as the owning project.
  location = ca_ref.Parent().Name()
  project = ca_ref.Parent().Parent().Name()
  name = _BUCKET_NAMING_PATTERN.format(uuid=uuid.uuid4())
  bucket = messages.Bucket(name=name, location=location)
  client.buckets.Insert(
      messages.StorageBucketsInsertRequest(project=project, bucket=bucket))
  return storage_util.BucketReference(name)
def CreateBucketForCertificateAuthority(ca_ref):
  """Creates a GCS bucket for use by the given Certificate Authority.

  Args:
    ca_ref: Resource reference to the CA. Its parent's name is used as the
      bucket location and its grandparent's name as the owning project.

  Returns:
    A storage_util.BucketReference for the newly created bucket.
  """
  client = storage_util.GetClient()
  messages = storage_util.GetMessages()
  location = ca_ref.Parent().Name()
  project = ca_ref.Parent().Parent().Name()
  # Randomized bucket name from the module-level pattern to avoid collisions.
  bucket_name = _BUCKET_NAMING_PATTERN.format(uuid=uuid.uuid4())
  # Label the bucket with the CA's full resource name so the bucket can be
  # traced back to the CA that owns it.
  labels = messages.Bucket.LabelsValue(additionalProperties=[
      messages.Bucket.LabelsValue.AdditionalProperty(
          key='certificate_authority_id', value=ca_ref.RelativeName())
  ])
  client.buckets.Insert(
      messages.StorageBucketsInsertRequest(
          project=project,
          bucket=messages.Bucket(
              name=bucket_name, location=location, labels=labels)))
  return storage_util.BucketReference(bucket_name)
def CreateBucketForCertificateAuthority(ca_ref):
  """Creates a CA content bucket: public default object ACL plus versioning."""
  client = storage_util.GetClient()
  messages = storage_util.GetMessages()
  # Location comes from the CA's parent resource; project from its
  # grandparent.
  project = ca_ref.Parent().Parent().Name()
  location = ca_ref.Parent().Name()
  name = _BUCKET_NAMING_PATTERN.format(uuid=uuid.uuid4())
  public_read = (
      messages.StorageBucketsInsertRequest
      .PredefinedDefaultObjectAclValueValuesEnum.publicRead)
  bucket = messages.Bucket(
      name=name,
      location=location,
      versioning=messages.Bucket.VersioningValue(enabled=True))
  client.buckets.Insert(
      messages.StorageBucketsInsertRequest(
          project=project,
          predefinedDefaultObjectAcl=public_read,
          bucket=bucket))
  return storage_util.BucketReference(name)
def testCreateBucketCreatesCorrectBucket(self, mock_uuid):
  """Bucket creation uses the uuid name, public default ACL, and versioning."""
  bucket_uuid = '28657537-369c-41c6-81fe-0212b41cc732'
  # The bucket name is derived from the (mocked) uuid4 value.
  expected_bucket_name = 'privateca_content_{}'.format(bucket_uuid)
  mock_uuid.return_value = uuid.UUID(bucket_uuid)
  self.client.buckets.Insert.Expect(
      request=self.messages.StorageBucketsInsertRequest(
          project='foo',
          predefinedDefaultObjectAcl=self.messages.
          StorageBucketsInsertRequest.
          PredefinedDefaultObjectAclValueValuesEnum.publicRead,
          bucket=self.messages.Bucket(
              name=expected_bucket_name,
              location='us-west1',
              versioning=self.messages.Bucket.VersioningValue(
                  enabled=True))),
      response=self.messages.Bucket())
  result = storage.CreateBucketForCertificateAuthority(self.ca_ref)
  # The function returns a reference to the bucket it just created.
  self.assertEqual(result,
                   storage_util.BucketReference(expected_bucket_name))
def SetUp(self):
  """Prepares bucket, crypto-key, and project references for the tests."""
  self.project_ref = command_lib_util.ParseProject(self._PROJECT_ID)
  self.key_ref = GetCryptoKeyRef(self._KEY_NAME)
  self.bucket_ref = storage_util.BucketReference(self._BUCKET_NAME)