def setUp(self):
    """Set up mocked API client, DAO, and the pipeline under test."""
    self.cycle_timestamp = '20001225T120000Z'
    self.configs = fake_configs.FAKE_CONFIGS
    self.mock_crm = mock.create_autospec(crm.CloudResourceManagerClient)
    self.mock_dao = mock.create_autospec(proj_dao.ProjectDao)
    load_projects_pipeline.LOGGER = mock.MagicMock()
    self.pipeline = load_projects_pipeline.LoadProjectsPipeline(
        self.cycle_timestamp, self.configs, self.mock_crm, self.mock_dao)
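

# A minimal sketch of a test that could use the fixtures above. It assumes
# the base pipeline stores its constructor arguments as cycle_timestamp,
# configs, api_client, and dao attributes; adjust to the actual base class.
def test_constructor_wires_dependencies(self):
    """The pipeline should hold the timestamp, configs, client, and DAO."""
    self.assertEqual(self.cycle_timestamp, self.pipeline.cycle_timestamp)
    self.assertEqual(self.configs, self.pipeline.configs)
    self.assertEqual(self.mock_crm, self.pipeline.api_client)
    self.assertEqual(self.mock_dao, self.pipeline.dao)
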
def _build_pipelines(cycle_timestamp, configs, **kwargs):
    """Build the pipelines to load data.

    Args:
        cycle_timestamp: String of timestamp, formatted as YYYYMMDDTHHMMSSZ.
        configs: Dictionary of configurations.
        kwargs: Extra configs, e.g. the DAOs for each resource type.

    Returns:
        List of pipelines that will be run.

    Raises:
        inventory_errors.LoadDataPipelineError: If groups are configured
            for inventory but cannot be inventoried with the given configs.
    """

    # Commonly used API clients, instantiated once so that their shared
    # rate limiters are reused across pipelines.
    crm_v1_api_client = crm.CloudResourceManagerClient()
    dao = kwargs.get('dao')
    gcs_api_client = gcs.StorageClient()

    organization_dao_name = 'organization_dao'
    project_dao_name = 'project_dao'

    # The order here matters, e.g. groups_pipeline must come before
    # group_members_pipeline.
    pipelines = [
        load_orgs_pipeline.LoadOrgsPipeline(cycle_timestamp, configs,
                                            crm_v1_api_client,
                                            kwargs.get(organization_dao_name)),
        load_org_iam_policies_pipeline.LoadOrgIamPoliciesPipeline(
            cycle_timestamp, configs, crm_v1_api_client,
            kwargs.get(organization_dao_name)),
        load_projects_pipeline.LoadProjectsPipeline(
            cycle_timestamp, configs, crm_v1_api_client,
            kwargs.get(project_dao_name)),
        load_projects_iam_policies_pipeline.LoadProjectsIamPoliciesPipeline(
            cycle_timestamp, configs, crm_v1_api_client,
            kwargs.get(project_dao_name)),
        load_projects_buckets_pipeline.LoadProjectsBucketsPipeline(
            cycle_timestamp, configs, gcs_api_client,
            kwargs.get(project_dao_name)),
        load_projects_buckets_acls_pipeline.LoadProjectsBucketsAclsPipeline(
            cycle_timestamp, configs, gcs_api_client,
            kwargs.get('bucket_dao')),
        load_projects_cloudsql_pipeline.LoadProjectsCloudsqlPipeline(
            cycle_timestamp, configs, cloudsql.CloudsqlClient(),
            kwargs.get('cloudsql_dao')),
        load_forwarding_rules_pipeline.LoadForwardingRulesPipeline(
            cycle_timestamp, configs, compute.ComputeClient(),
            kwargs.get('fwd_rules_dao')),
        load_folders_pipeline.LoadFoldersPipeline(
            cycle_timestamp, configs,
            crm.CloudResourceManagerClient(version='v2beta1'), dao),
        load_bigquery_datasets_pipeline.LoadBigQueryDatasetsPipeline(
            cycle_timestamp, configs, bq.BigQueryClient(), dao),
        load_firewall_rules_pipeline.LoadFirewallRulesPipeline(
            cycle_timestamp, configs, compute.ComputeClient(version='beta'),
            kwargs.get(project_dao_name)),
    ]

    if configs.get('inventory_groups'):
        if util.can_inventory_groups(configs):
            admin_api_client = ad.AdminDirectoryClient()
            pipelines.extend([
                load_groups_pipeline.LoadGroupsPipeline(
                    cycle_timestamp, configs, admin_api_client, dao),
                load_group_members_pipeline.LoadGroupMembersPipeline(
                    cycle_timestamp, configs, admin_api_client, dao)
            ])
        else:
            raise inventory_errors.LoadDataPipelineError(
                'Unable to inventory groups with specified arguments:\n%s'
                % configs)

    return pipelines
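

# A minimal sketch (not part of the module above) of a call site for
# _build_pipelines. The keyword names mirror the kwargs read above; the DAO
# values and the run() method on each pipeline are assumptions about the
# surrounding codebase, not confirmed APIs.
def _run_all_pipelines(cycle_timestamp, configs, dao_map):
    """Build and run every pipeline for one inventory cycle.

    Args:
        cycle_timestamp: String of timestamp, formatted as YYYYMMDDTHHMMSSZ.
        configs: Dictionary of configurations.
        dao_map: Dictionary of DAOs keyed by the kwarg names used above,
            e.g. 'dao', 'organization_dao', 'project_dao', 'bucket_dao',
            'cloudsql_dao', 'fwd_rules_dao'.
    """
    pipelines = _build_pipelines(cycle_timestamp, configs, **dao_map)
    for pipeline in pipelines:
        pipeline.run()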