def exists(self):
    """Check whether this organization is visible in GCP.

    Returns:
        True if the CRM API returns the organization, otherwise False.
    """
    client = crm.CloudResourceManagerClient()
    return client.get_organization(self.name) is not None
def exists(self):
    """Check whether this folder is visible in GCP.

    Returns:
        True if the CRM API returns the folder, otherwise False.
    """
    client = crm.CloudResourceManagerClient()
    return client.get_folder(self.id) is not None
def exists(self):
    """Check whether this project is visible in GCP.

    Returns:
        True if the CRM API returns the project, otherwise False.
    """
    client = crm.CloudResourceManagerClient()
    return client.get_project(self.id) is not None
def setUp(self, mock_base_client, mock_flags):
    """Build a CRM client with mocked service endpoints and load fixtures."""
    # Rate-limit flag must be set before the client is constructed.
    mock_flags.max_crm_api_calls_per_100_seconds = (
        self.MAX_CRM_API_CALLS_PER_100_SECONDS)

    client = crm.CloudResourceManagerClient()
    client.service = mock.MagicMock()
    client.service.projects = mock.MagicMock()
    client.service.organizations = mock.MagicMock()
    self.crm_api_client = client

    # Canned API responses and the results they should parse into.
    self.fake_projects_api_response1 = (
        fake_projects.FAKE_PROJECTS_API_RESPONSE1)
    self.expected_fake_projects1 = fake_projects.EXPECTED_FAKE_PROJECTS1
    self.expected_fake_active_projects1 = (
        fake_projects.EXPECTED_FAKE_ACTIVE_PROJECTS1)
def setUp(self, mock_google_credential, mock_discovery):
    """Build a CRM client from fake global configs and load fixtures."""
    configs = {
        'max_crm_api_calls_per_100_seconds':
            self.MAX_CRM_API_CALLS_PER_100_SECONDS,
    }
    client = crm.CloudResourceManagerClient(global_configs=configs)
    client.service = mock.MagicMock()
    client.service.projects = mock.MagicMock()
    client.service.organizations = mock.MagicMock()
    self.crm_api_client = client

    # Canned API responses and the results they should parse into.
    self.fake_projects_api_response1 = (
        fake_projects.FAKE_PROJECTS_API_RESPONSE1)
    self.expected_fake_projects1 = fake_projects.EXPECTED_FAKE_PROJECTS1
    self.expected_fake_active_projects1 = (
        fake_projects.EXPECTED_FAKE_ACTIVE_PROJECTS1)
def test_no_quota(self, mock_google_credential):
    """Verify no rate limiter is used if the configuration is missing."""
    client = crm.CloudResourceManagerClient(global_configs={})
    # With no quota configured, the repository gets no rate limiter.
    self.assertEqual(None, client.repository._rate_limiter)
def setUpClass(cls, mock_google_credential):
    """Create a shared CRM client (rate limiter disabled) for all tests."""
    configs = {'max_crm_api_calls_per_100_seconds': 1000000}
    cls.crm_api_client = crm.CloudResourceManagerClient(
        global_configs=configs,
        use_rate_limiter=False)
    cls.project_id = fake_crm_responses.FAKE_PROJECT_ID
def _build_pipelines(cycle_timestamp, configs, **kwargs):
    """Build the pipelines to load data.

    Args:
        cycle_timestamp: String of timestamp, formatted as YYYYMMDDTHHMMSSZ.
        configs: Dictionary of configurations.
        kwargs: Extra configs, including the dao instances keyed by name
            (e.g. 'dao', 'organization_dao', 'project_dao', 'bucket_dao',
            'cloudsql_dao', 'fwd_rules_dao').

    Returns:
        List of pipelines that will be run.

    Raises:
        inventory_errors.LoadDataPipelineError: If group inventorying is
            requested but not possible with the specified configs.
    """
    # Commonly used clients for shared ratelimiter re-use.
    crm_v1_api_client = crm.CloudResourceManagerClient()
    dao = kwargs.get('dao')
    gcs_api_client = gcs.StorageClient()
    organization_dao_name = 'organization_dao'
    project_dao_name = 'project_dao'

    # The order here matters, e.g. groups_pipeline must come before
    # group_members_pipeline.
    # (Fix: removed a dead `pipelines = []` assignment that was immediately
    # overwritten by this list literal.)
    pipelines = [
        load_orgs_pipeline.LoadOrgsPipeline(
            cycle_timestamp, configs, crm_v1_api_client,
            kwargs.get(organization_dao_name)),
        load_org_iam_policies_pipeline.LoadOrgIamPoliciesPipeline(
            cycle_timestamp, configs, crm_v1_api_client,
            kwargs.get(organization_dao_name)),
        load_projects_pipeline.LoadProjectsPipeline(
            cycle_timestamp, configs, crm_v1_api_client,
            kwargs.get(project_dao_name)),
        load_projects_iam_policies_pipeline.LoadProjectsIamPoliciesPipeline(
            cycle_timestamp, configs, crm_v1_api_client,
            kwargs.get(project_dao_name)),
        load_projects_buckets_pipeline.LoadProjectsBucketsPipeline(
            cycle_timestamp, configs, gcs_api_client,
            kwargs.get(project_dao_name)),
        load_projects_buckets_acls_pipeline.LoadProjectsBucketsAclsPipeline(
            cycle_timestamp, configs, gcs_api_client,
            kwargs.get('bucket_dao')),
        load_projects_cloudsql_pipeline.LoadProjectsCloudsqlPipeline(
            cycle_timestamp, configs, cloudsql.CloudsqlClient(),
            kwargs.get('cloudsql_dao')),
        load_forwarding_rules_pipeline.LoadForwardingRulesPipeline(
            cycle_timestamp, configs, compute.ComputeClient(),
            kwargs.get('fwd_rules_dao')),
        load_folders_pipeline.LoadFoldersPipeline(
            cycle_timestamp, configs,
            crm.CloudResourceManagerClient(version='v2beta1'),
            dao),
        load_bigquery_datasets_pipeline.LoadBigQueryDatasetsPipeline(
            cycle_timestamp, configs, bq.BigQueryClient(), dao),
        load_firewall_rules_pipeline.LoadFirewallRulesPipeline(
            cycle_timestamp, configs, compute.ComputeClient(version='beta'),
            kwargs.get(project_dao_name)),
    ]

    if configs.get('inventory_groups'):
        if not util.can_inventory_groups(configs):
            # Fix: exceptions do not lazy-format like logging calls, so the
            # message must be %-formatted before raising.
            raise inventory_errors.LoadDataPipelineError(
                'Unable to inventory groups with specified arguments:\n%s'
                % configs)
        admin_api_client = ad.AdminDirectoryClient()
        pipelines.extend([
            load_groups_pipeline.LoadGroupsPipeline(
                cycle_timestamp, configs, admin_api_client, dao),
            load_group_members_pipeline.LoadGroupMembersPipeline(
                cycle_timestamp, configs, admin_api_client, dao)
        ])

    return pipelines