def _create_dao_map():
    """Create a map of DAOs.

    These will be re-usable so that the db connection can apply
    across different pipelines.

    Returns:
        dict: Dictionary of DAOs, keyed by resource name.

    Raises:
        SystemExit: If the DAOs cannot be created due to a MySQL error,
            the process exits after logging the failure.
    """
    try:
        return {
            'backend_service_dao': backend_service_dao.BackendServiceDao(),
            'bucket_dao': bucket_dao.BucketDao(),
            'cloudsql_dao': cloudsql_dao.CloudsqlDao(),
            'dao': dao.Dao(),
            'folder_dao': folder_dao.FolderDao(),
            'forwarding_rules_dao': forwarding_rules_dao.ForwardingRulesDao(),
            'instance_dao': instance_dao.InstanceDao(),
            'instance_group_dao': instance_group_dao.InstanceGroupDao(),
            'instance_group_manager_dao':
                instance_group_manager_dao.InstanceGroupManagerDao(),
            'instance_template_dao':
                instance_template_dao.InstanceTemplateDao(),
            'organization_dao': organization_dao.OrganizationDao(),
            'project_dao': project_dao.ProjectDao(),
        }
    except data_access_errors.MySQLError as e:
        # Message fixed: was the ungrammatical "Error to creating DAO map."
        LOGGER.error('Error creating DAO map.\n%s', e)
        sys.exit()
def _get_cloudsql_acls(self):
    """Get CloudSQL acls from data source.

    Returns:
        list: List of CloudSql acls for the 'cloudsql_instances' resource
            at this object's snapshot timestamp.
    """
    # The original initialized cloudsql_acls to {} and then immediately
    # reassigned it — a dead store (and a dict, despite the documented
    # list return). Return the DAO result directly instead.
    return (cloudsql_dao.CloudsqlDao(self.global_configs)
            .get_cloudsql_acls('cloudsql_instances',
                               self.snapshot_timestamp))
def _create_dao_map(global_configs):
    """Create a map of DAOs.

    These will be reusable so that the db connection can apply
    across different pipelines.

    Args:
        global_configs (dict): Global configurations.

    Returns:
        dict: Dictionary of DAOs, keyed by resource name.

    Raises:
        SystemExit: If the DAOs cannot be created due to a MySQL error,
            the process exits after logging the failure.
    """
    try:
        return {
            'appengine_dao': appengine_dao.AppEngineDao(global_configs),
            'backend_service_dao':
                backend_service_dao.BackendServiceDao(global_configs),
            'bucket_dao': bucket_dao.BucketDao(global_configs),
            'cloudsql_dao': cloudsql_dao.CloudsqlDao(global_configs),
            'dao': dao.Dao(global_configs),
            'firewall_rule_dao':
                firewall_rule_dao.FirewallRuleDao(global_configs),
            'folder_dao': folder_dao.FolderDao(global_configs),
            'forseti_system_dao':
                forseti_system_dao.ForsetiSystemDao(global_configs),
            'forwarding_rules_dao':
                forwarding_rules_dao.ForwardingRulesDao(global_configs),
            'ke_dao': ke_dao.KeDao(global_configs),
            'instance_dao': instance_dao.InstanceDao(global_configs),
            'instance_group_dao':
                instance_group_dao.InstanceGroupDao(global_configs),
            'instance_group_manager_dao':
                instance_group_manager_dao.InstanceGroupManagerDao(
                    global_configs),
            'instance_template_dao':
                instance_template_dao.InstanceTemplateDao(global_configs),
            'organization_dao':
                organization_dao.OrganizationDao(global_configs),
            'project_dao': project_dao.ProjectDao(global_configs),
            'service_account_dao':
                service_account_dao.ServiceAccountDao(global_configs),
        }
    except data_access_errors.MySQLError as e:
        # Message fixed: was the ungrammatical "Error to creating DAO map."
        LOGGER.error('Error creating DAO map.\n%s', e)
        sys.exit()
def main(_):
    """Runs the Inventory Loader.

    Creates the DAOs, starts a snapshot cycle, builds and runs the
    inventory pipelines, records the cycle outcome, and optionally
    sends a summary email.
    """
    # All DAOs share the db connection; bail out early if it can't be made.
    try:
        base_dao = Dao()
        projects = proj_dao.ProjectDao()
        organizations = org_dao.OrganizationDao()
        buckets = buck_dao.BucketDao()
        cloudsql = sql_dao.CloudsqlDao()
        fwd_rules = fr_dao.ForwardingRulesDao()
        folders = folder_resource_dao.FolderDao()
    except data_access_errors.MySQLError as e:
        LOGGER.error('Encountered error with Cloud SQL. Abort.\n%s', e)
        sys.exit()

    cycle_time, cycle_timestamp = _start_snapshot_cycle(base_dao)

    configs = FLAGS.FlagValuesDict()
    _configure_logging(configs)

    try:
        pipelines = _build_pipelines(
            cycle_timestamp,
            configs,
            dao=base_dao,
            project_dao=projects,
            organization_dao=organizations,
            bucket_dao=buckets,
            fwd_rules_dao=fwd_rules,
            folder_dao=folders,
            cloudsql_dao=cloudsql)
    except (api_errors.ApiExecutionError,
            inventory_errors.LoadDataPipelineError) as e:
        LOGGER.error('Unable to build pipelines.\n%s', e)
        sys.exit()

    run_statuses = _run_pipelines(pipelines)

    # All pipelines ok -> SUCCESS; some ok -> PARTIAL_SUCCESS; none -> FAILURE.
    snapshot_cycle_status = (
        'SUCCESS' if all(run_statuses)
        else 'PARTIAL_SUCCESS' if any(run_statuses)
        else 'FAILURE')

    _complete_snapshot_cycle(base_dao, cycle_timestamp, snapshot_cycle_status)

    email_recipient = configs.get('email_recipient')
    if email_recipient is not None:
        _send_email(cycle_time, cycle_timestamp, snapshot_cycle_status,
                    pipelines, configs.get('sendgrid_api_key'),
                    configs.get('email_sender'), email_recipient)
def setUp(self, mock_db_connector):
    """Set up the test fixture with a stubbed-out db connector."""
    # Neutralize the db connector before the DAO is constructed.
    mock_db_connector.return_value = None
    self.fake_timestamp = '12345'
    self.resource_name = 'cloudsql_instances'
    self.cloudsql_dao = cloudsql_dao.CloudsqlDao()