def _create_dao_map():
    """Create a map of DAOs.

    These will be re-usable so that the db connection can apply
    across different pipelines.

    Returns:
        dict: Dictionary of DAO instances, keyed by short DAO name.

    Raises:
        SystemExit: If the database connection cannot be established
            (exits with a non-zero status).
    """
    try:
        return {
            'backend_service_dao': backend_service_dao.BackendServiceDao(),
            'bucket_dao': bucket_dao.BucketDao(),
            'cloudsql_dao': cloudsql_dao.CloudsqlDao(),
            'dao': dao.Dao(),
            'folder_dao': folder_dao.FolderDao(),
            'forwarding_rules_dao':
                forwarding_rules_dao.ForwardingRulesDao(),
            'instance_dao': instance_dao.InstanceDao(),
            'instance_group_dao': instance_group_dao.InstanceGroupDao(),
            'instance_group_manager_dao':
                instance_group_manager_dao.InstanceGroupManagerDao(),
            'instance_template_dao':
                instance_template_dao.InstanceTemplateDao(),
            'organization_dao': organization_dao.OrganizationDao(),
            'project_dao': project_dao.ProjectDao(),
        }
    except data_access_errors.MySQLError as e:
        # Without DAOs nothing downstream can run.  Fixed the original
        # message ("Error to creating") and exit with failure status
        # instead of the implicit success (0) of bare sys.exit().
        LOGGER.error('Error creating DAO map.\n%s', e)
        sys.exit(1)
def setUp(self, mock_db_connector):
    """Set up the OrganizationDao test fixtures."""
    # Fixture data shared by the test cases.
    self.resource_name = 'organizations'
    self.fake_timestamp = '12345'
    self.fake_orgs_db_rows = fake_orgs.FAKE_ORGS_DB_ROWS
    self.fake_orgs_ok_iam_db_rows = fake_orgs.FAKE_ORGS_OK_IAM_DB_ROWS
    self.fake_orgs_bad_iam_db_rows = fake_orgs.FAKE_ORGS_BAD_IAM_DB_ROWS

    # Neutralize the db connector, then stub the sql-fetch entry point
    # so no real database is touched.
    mock_db_connector.return_value = None
    self.org_dao = organization_dao.OrganizationDao()
    self.fetch_mock = mock.MagicMock()
    self.org_dao.execute_sql_with_fetch = self.fetch_mock
def _create_dao_map(global_configs):
    """Create a map of DAOs.

    These will be reusable so that the db connection can apply
    across different pipelines.

    Args:
        global_configs (dict): Global configurations.

    Returns:
        dict: Dictionary of DAO instances, keyed by short DAO name.

    Raises:
        SystemExit: If the database connection cannot be established
            (exits with a non-zero status).
    """
    try:
        return {
            'appengine_dao': appengine_dao.AppEngineDao(global_configs),
            'backend_service_dao':
                backend_service_dao.BackendServiceDao(global_configs),
            'bucket_dao': bucket_dao.BucketDao(global_configs),
            'cloudsql_dao': cloudsql_dao.CloudsqlDao(global_configs),
            'dao': dao.Dao(global_configs),
            'firewall_rule_dao':
                firewall_rule_dao.FirewallRuleDao(global_configs),
            'folder_dao': folder_dao.FolderDao(global_configs),
            'forseti_system_dao':
                forseti_system_dao.ForsetiSystemDao(global_configs),
            'forwarding_rules_dao':
                forwarding_rules_dao.ForwardingRulesDao(global_configs),
            'ke_dao': ke_dao.KeDao(global_configs),
            'instance_dao': instance_dao.InstanceDao(global_configs),
            'instance_group_dao':
                instance_group_dao.InstanceGroupDao(global_configs),
            'instance_group_manager_dao':
                instance_group_manager_dao.InstanceGroupManagerDao(
                    global_configs),
            'instance_template_dao':
                instance_template_dao.InstanceTemplateDao(global_configs),
            'organization_dao':
                organization_dao.OrganizationDao(global_configs),
            'project_dao': project_dao.ProjectDao(global_configs),
            'service_account_dao':
                service_account_dao.ServiceAccountDao(global_configs),
        }
    except data_access_errors.MySQLError as e:
        # Without DAOs nothing downstream can run.  Fixed the original
        # message ("Error to creating") and exit with failure status
        # instead of the implicit success (0) of bare sys.exit().
        LOGGER.error('Error creating DAO map.\n%s', e)
        sys.exit(1)
def main(_): """Runs the Inventory Loader.""" try: dao = Dao() project_dao = proj_dao.ProjectDao() organization_dao = org_dao.OrganizationDao() bucket_dao = buck_dao.BucketDao() cloudsql_dao = sql_dao.CloudsqlDao() fwd_rules_dao = fr_dao.ForwardingRulesDao() folder_dao = folder_resource_dao.FolderDao() except data_access_errors.MySQLError as e: LOGGER.error('Encountered error with Cloud SQL. Abort.\n%s', e) sys.exit() cycle_time, cycle_timestamp = _start_snapshot_cycle(dao) configs = FLAGS.FlagValuesDict() _configure_logging(configs) try: pipelines = _build_pipelines(cycle_timestamp, configs, dao=dao, project_dao=project_dao, organization_dao=organization_dao, bucket_dao=bucket_dao, fwd_rules_dao=fwd_rules_dao, folder_dao=folder_dao, cloudsql_dao=cloudsql_dao) except (api_errors.ApiExecutionError, inventory_errors.LoadDataPipelineError) as e: LOGGER.error('Unable to build pipelines.\n%s', e) sys.exit() run_statuses = _run_pipelines(pipelines) if all(run_statuses): snapshot_cycle_status = 'SUCCESS' elif any(run_statuses): snapshot_cycle_status = 'PARTIAL_SUCCESS' else: snapshot_cycle_status = 'FAILURE' _complete_snapshot_cycle(dao, cycle_timestamp, snapshot_cycle_status) if configs.get('email_recipient') is not None: _send_email(cycle_time, cycle_timestamp, snapshot_cycle_status, pipelines, configs.get('sendgrid_api_key'), configs.get('email_sender'), configs.get('email_recipient'))
def _get_org_iam_policies(self):
    """Get org IAM policies from the data source.

    Returns:
        dict: Org IAM policies from inventory; empty when the
            database read fails.
    """
    try:
        dao = organization_dao.OrganizationDao(self.global_configs)
        return dao.get_org_iam_policies(
            'organizations', self.snapshot_timestamp)
    except db_errors.MySQLError as e:
        # Best-effort: log and fall back to an empty result rather
        # than aborting the caller.
        LOGGER.error('Error getting Organization IAM policies: %s', e)
        return {}
def _get_org_policies(self):
    """Get org IAM policies from the data source.

    The snapshot timestamp is taken from ``self.snapshot_timestamp``.
    (The previous docstring documented a ``timestamp`` argument that
    this method does not accept.)

    Returns:
        dict: The org IAM policies; empty when the database read fails.
    """
    org_policies = {}
    try:
        org_dao = organization_dao.OrganizationDao()
        org_policies = org_dao.get_org_iam_policies(
            'organizations', self.snapshot_timestamp)
    except da_errors.MySQLError as e:
        # Best-effort: log and return the empty default.
        LOGGER.error('Error getting Organization IAM policies: %s', e)
    return org_policies
def __init__(self): """Initialize.""" # Map the org resource type to the appropriate dao class self._resource_db_lookup = { resource.ResourceType.ORGANIZATION: { 'dao': organization_dao.OrganizationDao(), 'get': 'get_organization', }, resource.ResourceType.FOLDER: { 'dao': folder_dao.FolderDao(), 'get': 'get_folder', }, resource.ResourceType.PROJECT: { 'dao': project_dao.ProjectDao(), 'get': 'get_project', } }