def testCanGetApiThatIsAlreadyInitialized(self):
    """Verify _get_api() returns the cached client from initialized_api_map.

    Pre-seeds the builder's API cache so _get_api() should hit the cache
    instead of constructing a new API client.
    """
    my_pipeline_builder = pipeline_builder.PipelineBuilder(
        FAKE_TIMESTAMP, 'foo_path', mock.MagicMock(), api_map.API_MAP,
        mock.MagicMock())
    # Seed the cache directly; 'bar' stands in for an initialized API client.
    my_pipeline_builder.initialized_api_map = {'foo': 'bar'}
    # assertEquals is a deprecated alias (removed in Python 3.12);
    # use assertEqual.
    self.assertEqual('bar', my_pipeline_builder._get_api('foo'))
def _setup_pipeline_builder(self, config_filename):
    """Create a PipelineBuilder for a config file, with _get_api mocked out.

    Args:
        config_filename (str): Config file name, resolved under BASE_PATH.

    Returns:
        PipelineBuilder: A builder whose _get_api is a MagicMock, so tests
            never touch real API clients.
    """
    builder_under_test = pipeline_builder.PipelineBuilder(
        FAKE_TIMESTAMP,
        BASE_PATH + config_filename,
        mock.MagicMock(),
        mock.MagicMock(),
        mock.MagicMock())
    builder_under_test._get_api = mock.MagicMock()
    return builder_under_test
def _setup_pipeline_builder(self, config_filename):
    """Create a PipelineBuilder from a parsed config, with _get_api mocked out.

    Args:
        config_filename (str): Config file name, resolved under BASE_PATH and
            parsed via file_loader.

    Returns:
        PipelineBuilder: A builder whose _get_api is a MagicMock, so tests
            never touch real API clients.
    """
    parsed_configs = file_loader.read_and_parse_file(
        BASE_PATH + config_filename)
    test_builder = pipeline_builder.PipelineBuilder(
        FAKE_TIMESTAMP,
        parsed_configs,
        mock.MagicMock(),
        mock.MagicMock(),
        mock.MagicMock())
    test_builder._get_api = mock.MagicMock()
    return test_builder
def main(_): """Runs the Inventory Loader. Args: _ (list): args that aren't used Returns: """ del _ inventory_flags = FLAGS.FlagValuesDict() if inventory_flags.get('list_resources'): inventory_util.list_resource_pipelines() sys.exit() _configure_logging(inventory_flags.get('loglevel')) config_path = inventory_flags.get('config_path') if config_path is None: LOGGER.error('Path to pipeline config needs to be specified.') sys.exit() dao_map = _create_dao_map() cycle_time, cycle_timestamp = _start_snapshot_cycle(dao_map.get('dao')) pipeline_builder = builder.PipelineBuilder(cycle_timestamp, config_path, flags, api_map.API_MAP, dao_map) pipelines = pipeline_builder.build() run_statuses = _run_pipelines(pipelines) if all(run_statuses): snapshot_cycle_status = 'SUCCESS' elif any(run_statuses): snapshot_cycle_status = 'PARTIAL_SUCCESS' else: snapshot_cycle_status = 'FAILURE' _complete_snapshot_cycle(dao_map.get('dao'), cycle_timestamp, snapshot_cycle_status) if inventory_flags.get('email_recipient') is not None: payload = { 'email_sender': inventory_flags.get('email_sender'), 'email_recipient': inventory_flags.get('email_recipient'), 'sendgrid_api_key': inventory_flags.get('sendgrid_api_key'), 'cycle_time': cycle_time, 'cycle_timestamp': cycle_timestamp, 'snapshot_cycle_status': snapshot_cycle_status, 'pipelines': pipelines } message = {'status': 'inventory_done', 'payload': payload} notifier.process(message)
def testInitializeApiWithEmptyInitializedApiMap(self, mock_admin):
    """Verify _get_api() lazily creates and caches an API client.

    Starting from an empty cache, requesting 'admin_api' should construct
    the client and leave exactly one entry in initialized_api_map.
    """
    my_pipeline_builder = pipeline_builder.PipelineBuilder(
        FAKE_TIMESTAMP, 'foo_path', mock.MagicMock(), api_map.API_MAP,
        mock.MagicMock())
    admin_api = my_pipeline_builder._get_api('admin_api')
    # Fixed deprecated assertEquals -> assertEqual (alias removed in
    # Python 3.12), assertTrue(x in y) -> assertIn for clearer failure
    # messages, and len(d.keys()) -> len(d).
    self.assertEqual(1, len(my_pipeline_builder.initialized_api_map))
    self.assertIn('admin_api', my_pipeline_builder.initialized_api_map)
    self.assertIn('AdminDirectoryClient()', str(admin_api))
def main(_): """Runs the Inventory Loader.""" inventory_flags = FLAGS.FlagValuesDict() if inventory_flags.get('list_resources'): inventory_util.list_resource_pipelines() sys.exit() _configure_logging(inventory_flags.get('loglevel')) config_path = inventory_flags.get('config_path') if config_path is None: LOGGER.error('Path to pipeline config needs to be specified.') sys.exit() dao_map = _create_dao_map() cycle_time, cycle_timestamp = _start_snapshot_cycle(dao_map.get('dao')) pipeline_builder = builder.PipelineBuilder( cycle_timestamp, config_path, flags, api_map.API_MAP, dao_map) pipelines = pipeline_builder.build() run_statuses = _run_pipelines(pipelines) if all(run_statuses): snapshot_cycle_status = 'SUCCESS' elif any(run_statuses): snapshot_cycle_status = 'PARTIAL_SUCCESS' else: snapshot_cycle_status = 'FAILURE' _complete_snapshot_cycle(dao_map.get('dao'), cycle_timestamp, snapshot_cycle_status) if inventory_flags.get('email_recipient') is not None: email_pipeline = ( email_inventory_snapshot_summary_pipeline .EmailInventorySnapshopSummaryPipeline( inventory_flags.get('sendgrid_api_key'))) email_pipeline.run( cycle_time, cycle_timestamp, snapshot_cycle_status, pipelines, inventory_flags.get('email_sender'), inventory_flags.get('email_recipient'))
def main(_): """Runs the Inventory Loader. Args: _ (list): args that aren't used """ del _ inventory_flags = FLAGS.FlagValuesDict() if inventory_flags.get('list_resources'): inventory_util.list_resource_pipelines() sys.exit() forseti_config = inventory_flags.get('forseti_config') if forseti_config is None: LOGGER.error('Path to Forseti Security config needs to be specified.') sys.exit() try: configs = file_loader.read_and_parse_file(forseti_config) except IOError: LOGGER.error('Unable to open Forseti Security config file. ' 'Please check your path and filename and try again.') sys.exit() global_configs = configs.get('global') inventory_configs = configs.get('inventory') log_util.set_logger_level_from_config(inventory_configs.get('loglevel')) dao_map = _create_dao_map(global_configs) cycle_time, cycle_timestamp = _start_snapshot_cycle(dao_map.get('dao')) pipeline_builder = builder.PipelineBuilder( cycle_timestamp, inventory_configs, global_configs, api_map.API_MAP, dao_map) pipelines = pipeline_builder.build() run_statuses = _run_pipelines(pipelines) if all(run_statuses): snapshot_cycle_status = 'SUCCESS' elif any(run_statuses): snapshot_cycle_status = 'PARTIAL_SUCCESS' else: snapshot_cycle_status = 'FAILURE' _complete_snapshot_cycle(dao_map.get('dao'), cycle_timestamp, snapshot_cycle_status) if global_configs.get('email_recipient') is not None: payload = { 'email_sender': global_configs.get('email_sender'), 'email_recipient': global_configs.get('email_recipient'), 'sendgrid_api_key': global_configs.get('sendgrid_api_key'), 'cycle_time': cycle_time, 'cycle_timestamp': cycle_timestamp, 'snapshot_cycle_status': snapshot_cycle_status, 'pipelines': pipelines } message = { 'status': 'inventory_done', 'payload': payload } notifier.process(message)