def __init__(self, resource, cycle_timestamp, violations, notifier_config, pipeline_config):
    """Constructor for the base pipeline.

    Args:
        resource: Violation resource name.
        cycle_timestamp: String of timestamp, formatted as
            YYYYMMDDTHHMMSSZ.
        violations: Dictionary of violations.
        notifier_config: Dictionary of notifier configurations.
        pipeline_config: Dictionary of pipeline configurations.
    """
    self.cycle_timestamp = cycle_timestamp
    self.resource = resource
    self.notifier_config = notifier_config
    self.pipeline_config = pipeline_config

    # TODO: import api_client
    # self.api_client = api_client

    # Initializing DAOs
    self.dao = dao.Dao()
    self.project_dao = project_dao.ProjectDao()
    self.violation_dao = violation_dao.ViolationDao()

    # Get violations
    self.violations = violations
def _create_dao_map():
    """Create a map of DAOs.

    These will be re-usable so that the db connection can apply
    across different pipelines.

    Returns:
        dict: Dictionary of DAO instances, keyed by a short name.

    Raises:
        SystemExit: Exits with a non-zero status if the DAOs cannot be
            created (e.g. the MySQL connection fails).
    """
    try:
        return {
            'backend_service_dao': backend_service_dao.BackendServiceDao(),
            'bucket_dao': bucket_dao.BucketDao(),
            'cloudsql_dao': cloudsql_dao.CloudsqlDao(),
            'dao': dao.Dao(),
            'folder_dao': folder_dao.FolderDao(),
            'forwarding_rules_dao': forwarding_rules_dao.ForwardingRulesDao(),
            'instance_dao': instance_dao.InstanceDao(),
            'instance_group_dao': instance_group_dao.InstanceGroupDao(),
            'instance_group_manager_dao':
                instance_group_manager_dao.InstanceGroupManagerDao(),
            'instance_template_dao':
                instance_template_dao.InstanceTemplateDao(),
            'organization_dao': organization_dao.OrganizationDao(),
            'project_dao': project_dao.ProjectDao(),
        }
    except data_access_errors.MySQLError as e:
        # Fixed log-message grammar and exit with status 1 so callers
        # and schedulers can detect this as a failure (bare sys.exit()
        # exits with status 0, i.e. "success").
        LOGGER.error('Error creating DAO map.\n%s', e)
        sys.exit(1)
def __init__(self, resource, cycle_timestamp, violations,
             global_configs, notifier_config, pipeline_config):
    """Constructor for the base pipeline.

    Args:
        resource (str): Violation resource name.
        cycle_timestamp (str): Snapshot timestamp, formatted as
            YYYYMMDDTHHMMSSZ.
        violations (dict): Violations to notify on.
        global_configs (dict): Global configurations.
        notifier_config (dict): Notifier configurations.
        pipeline_config (dict): Pipeline configurations.
    """
    # Plain configuration/state attributes.
    self.resource = resource
    self.cycle_timestamp = cycle_timestamp
    self.violations = violations
    self.global_configs = global_configs
    self.notifier_config = notifier_config
    self.pipeline_config = pipeline_config

    # TODO: import api_client
    # self.api_client = api_client

    # All DAOs share the same global configuration.
    self.dao = dao.Dao(global_configs)
    self.project_dao = project_dao.ProjectDao(global_configs)
    self.violation_dao = violation_dao.ViolationDao(global_configs)
def _retrieve(self):
    """Retrieve firewall rules from GCP.

    Get all the projects in the current snapshot and retrieve the
    firewall rules for each. Projects without firewalls will also be
    captured.

    Returns:
        dict: A dict mapping projects with a list of their firewall
            rules. {project_id1: [firewall_rule1a, firewall_rule1b],
                    project_id2: [firewall_rule2a, firewall_rule2b],
                    project_id3: [firewall_rule3a, firewall_rule3b]}
    """
    rules_by_project = {}
    snapshot_projects = proj_dao.ProjectDao(self.global_configs).get_projects(
        self.cycle_timestamp)
    for proj in snapshot_projects:
        try:
            # Failed projects are simply omitted from the result map.
            rules_by_project[proj.id] = (
                self.api_client.get_firewall_rules(proj.id))
        except api_errors.ApiExecutionError as e:
            LOGGER.error(
                'Unable to get firewall rules for '
                'project id: %s\n%s', proj.id, e)
    return rules_by_project
def _retrieve(self):
    """Retrieve forwarding rules from GCP.

    Get all the projects in the current snapshot and retrieve the
    compute forwarding rules for each.

    Returns:
        dict: Mapping of projects to their forwarding rules (list):
            {project_id: [forwarding_rules]}
    """
    rules_by_project = {}
    snapshot_projects = proj_dao.ProjectDao().get_projects(
        self.cycle_timestamp)
    for proj in snapshot_projects:
        collected = []
        try:
            paged_response = self.api_client.get_forwarding_rules(proj.id)
            for page in paged_response:
                # Forwarding rules are grouped per region under 'items'.
                for region_rules in page.get('items', {}).values():
                    collected.extend(
                        region_rules.get('forwardingRules', []))
        except api_errors.ApiExecutionError as e:
            LOGGER.error(inventory_errors.LoadDataPipelineError(e))
        # Only record projects that actually have forwarding rules.
        if collected:
            rules_by_project[proj.id] = collected
    return rules_by_project
def _get_project_policies(self):
    """Get project policies from the data source.

    Returns:
        dict: Project policies for the 'projects' resource at this
            instance's snapshot timestamp, as returned by
            ProjectDao.get_project_policies().
    """
    # Removed a dead `project_policies = {}` assignment that was
    # immediately overwritten, and a backslash line continuation.
    return project_dao.ProjectDao().get_project_policies(
        'projects', self.snapshot_timestamp)
def _get_project_dao(self):
    """Init or get project_dao.

    Lazily creates the ProjectDao on first use and caches it on the
    instance for subsequent calls.

    Returns:
        project_dao.ProjectDao: The cached ProjectDao instance.
    """
    cached = self.project_dao
    if not cached:
        cached = project_dao.ProjectDao(self.global_configs)
        self.project_dao = cached
    return cached
def _get_project_policies(self):
    """Get project policies from the data source.

    Returns:
        dict: Project policies for the 'projects' resource at this
            instance's snapshot timestamp.
    """
    # Removed a dead `project_policies = {}` assignment that was
    # immediately overwritten by the DAO call.
    return project_dao.ProjectDao(self.global_configs).get_project_policies(
        'projects', self.snapshot_timestamp)
def _get_project_policies(self):
    """Get project policies from the data source.

    Returns:
        dict: If successful, returns a dictionary of project policies
            for the 'projects' resource at this instance's snapshot
            timestamp.
    """
    # Removed a dead `project_policies = {}` assignment that was
    # immediately overwritten, and a backslash line continuation.
    return project_dao.ProjectDao().get_project_policies(
        'projects', self.snapshot_timestamp)
def _create_dao_map(global_configs):
    """Create a map of DAOs.

    These will be reusable so that the db connection can apply
    across different pipelines.

    Args:
        global_configs (dict): Global configurations.

    Returns:
        dict: Dictionary of DAO instances, keyed by a short name.

    Raises:
        SystemExit: Exits with a non-zero status if the DAOs cannot be
            created (e.g. the MySQL connection fails).
    """
    try:
        return {
            'appengine_dao': appengine_dao.AppEngineDao(global_configs),
            'backend_service_dao':
                backend_service_dao.BackendServiceDao(global_configs),
            'bucket_dao': bucket_dao.BucketDao(global_configs),
            'cloudsql_dao': cloudsql_dao.CloudsqlDao(global_configs),
            'dao': dao.Dao(global_configs),
            'firewall_rule_dao':
                firewall_rule_dao.FirewallRuleDao(global_configs),
            'folder_dao': folder_dao.FolderDao(global_configs),
            'forseti_system_dao':
                forseti_system_dao.ForsetiSystemDao(global_configs),
            'forwarding_rules_dao':
                forwarding_rules_dao.ForwardingRulesDao(global_configs),
            'ke_dao': ke_dao.KeDao(global_configs),
            'instance_dao': instance_dao.InstanceDao(global_configs),
            'instance_group_dao':
                instance_group_dao.InstanceGroupDao(global_configs),
            'instance_group_manager_dao':
                instance_group_manager_dao.InstanceGroupManagerDao(
                    global_configs),
            'instance_template_dao':
                instance_template_dao.InstanceTemplateDao(global_configs),
            'organization_dao':
                organization_dao.OrganizationDao(global_configs),
            'project_dao': project_dao.ProjectDao(global_configs),
            'service_account_dao':
                service_account_dao.ServiceAccountDao(global_configs),
        }
    except data_access_errors.MySQLError as e:
        # Fixed log-message grammar and exit with status 1 so callers
        # and schedulers can detect this as a failure (bare sys.exit()
        # exits with status 0, i.e. "success").
        LOGGER.error('Error creating DAO map.\n%s', e)
        sys.exit(1)
def _retrieve(self):
    """Retrieve KE data from GCP.

    Get all the projects in the current snapshot and retrieve the
    clusters for each project. For each distinct zone in cluster, get
    the server config.

    server_config data will be incorporated into the cluster data as
    there is a 1:1 relationship, which saves adding another table.

    Returns:
        dict: Mapping projects with their KE clusters:
        {project1: [clusters],
         project2: [clusters],
         project3: [clusters]}
    """
    projects = (proj_dao.ProjectDao(self.global_configs).get_projects(
        self.cycle_timestamp))
    ke_services = {}
    for project in projects:
        clusters = self.safe_api_call('get_clusters', project.id)
        if clusters:
            for cluster in clusters:
                # TODO: Cache the server_config response and only make
                # the API call for zones that we don't have data on.
                # Check if this practically would make a difference,
                # i.e. would users really have multiple clusters in the
                # same zone for redundancy.

                # Guard against a missing selfLink: .get() returning
                # None used to raise AttributeError on .split() and
                # abort the entire retrieval. An empty selfLink now
                # falls through to the error branch below.
                self_link_parts = (cluster.get('selfLink') or '').split('/')
                zone = None
                location = None
                if 'zones' in self_link_parts:
                    zone = self_link_parts[
                        self_link_parts.index('zones') + 1]
                elif 'locations' in self_link_parts:
                    location = self_link_parts[
                        self_link_parts.index('locations') + 1]
                else:
                    LOGGER.error('Cluster has no zone or location: %s',
                                 cluster)
                    cluster['serverConfig'] = {}
                    continue
                server_config = self.safe_api_call('get_serverconfig',
                                                   project.id,
                                                   zone=zone,
                                                   location=location)
                cluster['serverConfig'] = server_config
            ke_services[project.id] = clusters
    return ke_services
def setUp(self, mock_db_connector):
    """Set up a ProjectDao with a mocked DB connector and SQL fetch."""
    mock_db_connector.return_value = None

    # Replace the SQL execution path with a mock we can inspect.
    self.fetch_mock = mock.MagicMock()
    self.project_dao = project_dao.ProjectDao()
    self.project_dao.execute_sql_with_fetch = self.fetch_mock

    # Common fixtures shared by the tests.
    self.resource_name = 'projects'
    self.fake_timestamp = '12345'
    self.fake_projects_db_rows = fake_projects.FAKE_PROJECTS_DB_ROWS
    self.fake_projects_bad_iam_db_rows = (
        fake_projects.FAKE_PROJECTS_BAD_IAM_DB_ROWS)
    self.fake_projects_iam_rows = (
        fake_projects.FAKE_PROJECTS_OK_IAM_DB_ROWS)
def main(_):
    """Runs the Inventory Loader.

    Args:
        _: Unused; absorbs the positional argument passed by the
            flags/app runner.
    """
    # Build all DAOs up front so a bad Cloud SQL configuration fails
    # fast, before any snapshot work begins.
    try:
        dao = Dao()
        project_dao = proj_dao.ProjectDao()
        organization_dao = org_dao.OrganizationDao()
        bucket_dao = buck_dao.BucketDao()
        cloudsql_dao = sql_dao.CloudsqlDao()
        fwd_rules_dao = fr_dao.ForwardingRulesDao()
        folder_dao = folder_resource_dao.FolderDao()
    except data_access_errors.MySQLError as e:
        LOGGER.error('Encountered error with Cloud SQL. Abort.\n%s', e)
        sys.exit()

    # Record the start of this snapshot cycle in the database.
    cycle_time, cycle_timestamp = _start_snapshot_cycle(dao)

    configs = FLAGS.FlagValuesDict()
    _configure_logging(configs)

    try:
        pipelines = _build_pipelines(cycle_timestamp, configs,
                                     dao=dao,
                                     project_dao=project_dao,
                                     organization_dao=organization_dao,
                                     bucket_dao=bucket_dao,
                                     fwd_rules_dao=fwd_rules_dao,
                                     folder_dao=folder_dao,
                                     cloudsql_dao=cloudsql_dao)
    except (api_errors.ApiExecutionError,
            inventory_errors.LoadDataPipelineError) as e:
        LOGGER.error('Unable to build pipelines.\n%s', e)
        sys.exit()

    run_statuses = _run_pipelines(pipelines)

    # All pipelines succeeded -> SUCCESS; some succeeded ->
    # PARTIAL_SUCCESS; none succeeded -> FAILURE.
    if all(run_statuses):
        snapshot_cycle_status = 'SUCCESS'
    elif any(run_statuses):
        snapshot_cycle_status = 'PARTIAL_SUCCESS'
    else:
        snapshot_cycle_status = 'FAILURE'

    _complete_snapshot_cycle(dao, cycle_timestamp, snapshot_cycle_status)

    # Email notification is optional; only send when a recipient is
    # configured.
    if configs.get('email_recipient') is not None:
        _send_email(cycle_time, cycle_timestamp, snapshot_cycle_status,
                    pipelines, configs.get('sendgrid_api_key'),
                    configs.get('email_sender'),
                    configs.get('email_recipient'))
def _get_project_policies(self):
    """Get project policies from the data source.

    Returns:
        dict: The project policies for the 'projects' resource at this
            instance's snapshot timestamp.
    """
    # Removed an Args section documenting a `timestamp` parameter this
    # method does not take, a dead `project_policies = {}` assignment,
    # and a backslash line continuation.
    return project_dao.ProjectDao().get_project_policies(
        'projects', self.snapshot_timestamp)
def __init__(self):
    """Initialize.

    Builds the lookup table mapping each org resource type to its DAO
    instance and the name of the DAO method that fetches the resource.
    """
    org_entry = {
        'dao': organization_dao.OrganizationDao(),
        'get': 'get_organization',
    }
    folder_entry = {
        'dao': folder_dao.FolderDao(),
        'get': 'get_folder',
    }
    project_entry = {
        'dao': project_dao.ProjectDao(),
        'get': 'get_project',
    }
    self._resource_db_lookup = {
        resource.ResourceType.ORGANIZATION: org_entry,
        resource.ResourceType.FOLDER: folder_entry,
        resource.ResourceType.PROJECT: project_entry,
    }
def _retrieve(self):
    """Retrieve AppEngine applications from GCP.

    Get all the projects in the current snapshot and retrieve the
    AppEngine applications for each.

    Returns:
        dict: Mapping of projects to their AppEngine applications:
            {project_id: application}
    """
    applications = {}
    for proj in proj_dao.ProjectDao().get_projects(self.cycle_timestamp):
        fetched_app = self.api_client.get_app(proj.id)
        # Projects without an AppEngine app are skipped.
        if fetched_app:
            applications[proj.id] = fetched_app
    return applications
def _retrieve(self):
    """Retrieve instances from GCP.

    Get all the projects in the current snapshot and retrieve the
    compute instances for each.

    Returns:
        dict: A map of projects with their instances (list):
            {project_id: [instances]}
    """
    vm_map = {}
    snapshot_projects = proj_dao.ProjectDao(self.global_configs).get_projects(
        self.cycle_timestamp)
    for proj in snapshot_projects:
        found = self.safe_api_call('get_instances', proj.id)
        # Only record projects that actually have instances.
        if found:
            vm_map[proj.id] = found
    return vm_map
def _retrieve(self):
    """Retrieve AppEngine applications from GCP.

    Get all the projects in the current snapshot and retrieve the
    AppEngine applications for each.

    Returns:
        dict: Mapping of projects to their AppEngine applications:
            {project_id: application}
    """
    app_map = {}
    snapshot_projects = proj_dao.ProjectDao(self.global_configs).get_projects(
        self.cycle_timestamp)
    for proj in snapshot_projects:
        application = self.safe_api_call('get_app', proj.id)
        # Projects without an AppEngine app are skipped.
        if application:
            app_map[proj.id] = application
    return app_map
def _retrieve(self):
    """Retrieve forwarding rules from GCP.

    Get all the projects in the current snapshot and retrieve the
    compute forwarding rules for each.

    Returns:
        dict: Mapping of projects to their forwarding rules (list):
            {project_id: [forwarding_rules]}
    """
    rules_map = {}
    snapshot_projects = proj_dao.ProjectDao(self.global_configs).get_projects(
        self.cycle_timestamp)
    for proj in snapshot_projects:
        fwd_rules = self.safe_api_call('get_forwarding_rules', proj.id)
        # Only record projects that actually have forwarding rules.
        if fwd_rules:
            rules_map[proj.id] = fwd_rules
    return rules_map
def _retrieve(self):
    """Retrieve instance groups from GCP.

    Get all the projects in the current snapshot and retrieve the
    compute instance groups for each.

    Returns:
        dict: Mapping of projects to their instance groups (list):
            {project_id: [instance groups]}
    """
    groups_by_project = {}
    for proj in proj_dao.ProjectDao().get_projects(self.cycle_timestamp):
        try:
            found_groups = self.api_client.get_instance_groups(proj.id)
            if found_groups:
                groups_by_project[proj.id] = found_groups
        except api_errors.ApiExecutionError as e:
            # Failed projects are logged and omitted from the result.
            LOGGER.error(inventory_errors.LoadDataPipelineError(e))
    return groups_by_project
def __init__(self, global_configs, rule_defs=None, snapshot_timestamp=None):
    """Initialization.

    Args:
        global_configs (dict): Global configurations.
        rule_defs (list): KE Version rule definition dicts.
        snapshot_timestamp (int): Snapshot timestamp.
    """
    super(KeVersionRuleBook, self).__init__()
    self._lock = threading.Lock()
    self.resource_rules_map = {}
    # An empty/None rule_defs means no rules are added.
    self.rule_defs = rule_defs if rule_defs else {}
    if rule_defs:
        self.add_rules(rule_defs)
    self.snapshot_timestamp = snapshot_timestamp
    self.org_res_rel_dao = org_resource_rel_dao.OrgResourceRelDao(
        global_configs)
    self.project_dao = project_dao.ProjectDao(global_configs)
def _retrieve(self):
    """Retrieve Service Accounts from GCP.

    Returns:
        dict: Mapping of projects to their Service Accounts:
            {project_id: application}
    """
    sa_by_project = {}
    snapshot_projects = proj_dao.ProjectDao(self.global_configs).get_projects(
        self.cycle_timestamp)
    for proj in snapshot_projects:
        accounts = self.api_client.get_service_accounts(proj.id)
        if not accounts:
            continue
        accounts = list(accounts)
        for account in accounts:
            # TODO: also retrieve associated IAM policies, see:
            # https://cloud.google.com/iam/reference/rest/v1/projects.serviceAccounts
            account['keys'] = (
                self.api_client.get_service_account_keys(
                    account['name'])['keys'])
        sa_by_project[proj.id] = accounts
    return sa_by_project
def _retrieve(self):
    """Retrieve instance templates from GCP.

    Get all the projects in the current snapshot and retrieve the
    compute instance templates for each.

    Returns:
        dict: Mapping of projects to their instance templates (list):
            {project_id: [instance templates]}
    """
    templates_map = {}
    snapshot_projects = proj_dao.ProjectDao(self.global_configs).get_projects(
        self.cycle_timestamp)
    for proj in snapshot_projects:
        try:
            found_templates = self.api_client.get_instance_templates(
                proj.id)
            if found_templates:
                templates_map[proj.id] = found_templates
        except api_errors.ApiExecutionError as e:
            # Failed projects are logged and omitted from the result.
            LOGGER.error(inventory_errors.LoadDataPipelineError(e))
    return templates_map
def _retrieve(self):
    """Retrieve forwarding rules from GCP.

    Get all the projects in the current snapshot and retrieve the
    compute forwarding rules for each.

    Returns:
        dict: Mapping of projects to their forwarding rules (list):
            {project_id: [forwarding_rules]}
    """
    fwd_rules_map = {}
    for proj in proj_dao.ProjectDao().get_projects(self.cycle_timestamp):
        try:
            found_rules = self.api_client.get_forwarding_rules(proj.id)
            if found_rules:
                fwd_rules_map[proj.id] = found_rules
        except api_errors.ApiExecutionError as e:
            # Failed projects are logged and omitted from the result.
            LOGGER.error(inventory_errors.LoadDataPipelineError(e))
    return fwd_rules_map
def _retrieve(self):
    """Retrieve AppEngine applications from GCP.

    Get all the projects in the current snapshot and retrieve the
    AppEngine applications for each. For each app, also collect its
    services, each service's versions, and each version's instances,
    flattened into loadable row dicts.

    Returns:
        tuple: (apps, loadable_services, loadable_versions,
            loadable_instances) where apps maps project ids to their
            application, and the three lists contain flattened row
            dicts with JSON-stringified payloads.
    """
    projects = (proj_dao.ProjectDao(self.global_configs).get_projects(
        self.cycle_timestamp))
    apps = {}
    loadable_services = []
    loadable_versions = []
    loadable_instances = []
    for project in projects:
        app = self.safe_api_call('get_app', project.id)
        if app:
            apps[project.id] = app
            services = self.safe_api_call('list_services', project.id)
            if services:
                for service in services:
                    app_id = app.get('id')
                    service_id = service.get('id')
                    loadable_services.append({
                        'project_id': project.id,
                        'app_id': app_id,
                        'service_id': service_id,
                        'service': parser.json_stringify(service)
                    })
                    versions = self.safe_api_call('list_versions',
                                                  project.id,
                                                  service_id)
                    if versions:
                        for version in versions:
                            version_id = version.get('id')
                            loadable_versions.append({
                                'project_id': project.id,
                                'app_id': app_id,
                                'service_id': service_id,
                                'version_id': version_id,
                                'version': parser.json_stringify(version)
                            })
                            instances = self.safe_api_call(
                                'list_instances',
                                project.id,
                                service_id,
                                version_id)
                            if instances:
                                for instance in instances:
                                    instance_id = instance.get('id')
                                    # NOTE(review): this stringify call
                                    # discards its result; the real
                                    # value is built in the dict below.
                                    # Looks like dead code - confirm.
                                    parser.json_stringify(instance)
                                    loadable_instances.append({
                                        'project_id': project.id,
                                        'app_id': app_id,
                                        'service_id': service_id,
                                        'version_id': version_id,
                                        'instance_id': instance_id,
                                        'instance':
                                            parser.json_stringify(instance)
                                    })
    return apps, loadable_services, loadable_versions, loadable_instances