def test_service_registration(self):
    """ Verifies that the Pipeline class can correctly register services.

    The service registration feature allows devices to offer services or
    script access to other devices in the pipeline.
    """

    # Load the pipeline configuration and create a test pipeline
    self.config.read_configuration(self.source_data_directory + '/hardware/pipelines/tests/data/pipeline_configuration_valid.yml')
    test_pipeline = pipeline.Pipeline(self.config.get('pipelines')[0], self.device_manager, self.command_parser)
    test_service_type = 'waffle'

    # Create and register a mock service
    test_service = MagicMock()
    test_service.id = 'test_service'
    test_service.type = test_service_type
    test_pipeline.register_service(test_service)
    # assertIs reports both operands on failure, unlike assertTrue(a is b)
    self.assertIs(test_pipeline.services[test_service_type]['test_service'], test_service)

    # Create and register a mock service with the same type but a different ID
    test_service_2 = MagicMock()
    test_service_2.id = 'test_service_2'
    test_service_2.type = test_service_type
    test_pipeline.register_service(test_service_2)
    self.assertIs(test_pipeline.services[test_service_type]['test_service_2'], test_service_2)

    # Try to register a third service with the same type and ID as an earlier service
    test_service_3 = MagicMock()
    test_service_3.id = 'test_service'
    test_service_3.type = test_service_type
    self.assertRaises(pipeline.ServiceAlreadyRegistered, test_pipeline.register_service, test_service_3)
def setUp(self):
    """Build a fully self-referential pygame stand-in and create the Game under test."""
    pygame_stub = MagicMock()

    # One simulated key press and one simulated user event for the event queue.
    key_event = MagicMock()
    key_event.type = 1
    key_event.key = 1
    user_event = MagicMock()
    user_event.type = 3

    pygame_stub.get_size.return_value = (0, 0)
    pygame_stub.get.return_value = [key_event, user_event]
    pygame_stub.get_ticks.side_effect = [0, 6000, 0, 5000]
    pygame_stub.Font.return_value = pygame_stub
    pygame_stub.render.return_value = pygame_stub
    pygame_stub.KEYDOWN = 1
    pygame_stub.K_RETURN = 1
    pygame_stub.USEREVENT = 2

    # The stub doubles as every pygame submodule the game touches.
    pygame_stub.font = pygame_stub
    pygame_stub.display = pygame_stub
    pygame_stub.event = pygame_stub
    pygame_stub.time = pygame_stub

    self.game = Game(self.SWITCHES, self.COLUMNS, self.ROWS, self.FPS,
                     self.COUNTDOWN, self.INTERVAL, self.SCORE_INCREMENT,
                     self.LEVEL_INCREMENT, self.INTERVAL_INCREMENT, pygame_stub)
def test_nodetype(self):
    """Verify Client._node_type() and the isdir()/isfile() helpers for plain
    nodes and for nodes reached through internal and external links."""
    mock_node = MagicMock(id=333)
    mock_node.type = 'vos:ContainerNode'
    client = Client()
    client.get_node = Mock(return_value=mock_node)
    # assertEquals is a deprecated alias (removed in Python 3.12); use assertEqual
    self.assertEqual('vos:ContainerNode', client._node_type('vos:/somenode'))
    self.assertTrue(client.isdir('vos:/somenode'))

    mock_node.type = 'vos:DataNode'
    self.assertEqual('vos:DataNode', client._node_type('vos:/somenode'))
    self.assertTrue(client.isfile('vos:/somenode'))

    # through a link
    mock_node.type = 'vos:ContainerNode'
    mock_link_node = Mock(type='vos:LinkNode')
    mock_link_node.target = 'vos:/somefile'
    client.get_node = Mock(
        side_effect=[mock_link_node, mock_node, mock_link_node, mock_node])
    self.assertEqual('vos:ContainerNode', client._node_type('vos:/somenode'))
    self.assertTrue(client.isdir('vos:/somenode'))

    # through an external link - not sure why the type is DataNode in this case???
    mock_link_node.target = '/somefile'
    client.get_node = Mock(side_effect=[mock_link_node, mock_link_node])
    self.assertEqual('vos:DataNode', client._node_type('vos:/somenode'))
    self.assertTrue(client.isfile('vos:/somenode'))
def test_nodetype(self):
    """Verify Client._node_type() and the isdir()/isfile() helpers for plain
    nodes and for nodes reached through internal and external links."""
    mock_node = MagicMock(id=333)
    mock_node.type = 'vos:ContainerNode'
    client = Client()
    client.get_node = Mock(return_value=mock_node)
    # assertEquals is a deprecated alias (removed in Python 3.12); use assertEqual
    self.assertEqual('vos:ContainerNode', client._node_type('vos:/somenode'))
    self.assertTrue(client.isdir('vos:/somenode'))

    mock_node.type = 'vos:DataNode'
    self.assertEqual('vos:DataNode', client._node_type('vos:/somenode'))
    self.assertTrue(client.isfile('vos:/somenode'))

    # through a link
    mock_node.type = 'vos:ContainerNode'
    mock_link_node = Mock(type='vos:LinkNode')
    mock_link_node.target = 'vos:/somefile'
    client.get_node = Mock(side_effect=[mock_link_node, mock_node,
                                        mock_link_node, mock_node])
    self.assertEqual('vos:ContainerNode', client._node_type('vos:/somenode'))
    self.assertTrue(client.isdir('vos:/somenode'))

    # through an external link - not sure why the type is DataNode in
    # this case???
    mock_link_node.target = '/somefile'
    client.get_node = Mock(side_effect=[mock_link_node, mock_link_node])
    self.assertEqual('vos:DataNode', client._node_type('vos:/somenode'))
    self.assertTrue(client.isfile('vos:/somenode'))
def test_service_activation_and_lookup(self):
    """ This tests that the service activation and lookup methods are working
    as expected.

    In order for a service to be query-able, it must be active (as specified
    by the configuration for the session using the pipeline).
    """

    # Create a test pipeline to work with
    self.config.read_configuration(self.source_data_directory + '/hardware/pipelines/tests/data/pipeline_configuration_valid.yml')
    test_pipeline = pipeline.Pipeline(self.config.get('pipelines')[0], self.device_manager, self.command_parser)

    # Create some mock services and register them with the pipeline
    test_tracker_service = MagicMock()
    test_tracker_service.id = "sgp4"
    test_tracker_service.type = "tracker"
    test_pipeline.register_service(test_tracker_service)
    test_logger_service = MagicMock()
    test_logger_service.id = "basic"
    test_logger_service.type = "logger"
    test_pipeline.register_service(test_logger_service)
    test_cornballer_service = MagicMock()
    test_cornballer_service.id = "deluxe"
    test_cornballer_service.type = "cornballer"
    test_pipeline.register_service(test_cornballer_service)

    # Define a callback to continue the test after the schedule has been loaded
    def continue_test(reservation_schedule):
        # Load a reservation that specifies some active services
        test_reservation_config = self._load_reservation_config(reservation_schedule, 'RES.5')

        # Create a new session
        test_session = session.Session(test_reservation_config, test_pipeline, self.command_parser)

        # Register the session with the pipeline; this will also activate the reservation's services
        test_pipeline.register_session(test_session)

        # Make sure the active services can be loaded
        # (assertIs reports both operands on failure, unlike assertTrue(a is b))
        self.assertIs(test_pipeline.load_service("tracker"), test_tracker_service)
        self.assertIs(test_pipeline.load_service("logger"), test_logger_service)
        self.assertRaises(pipeline.ServiceTypeNotFound, test_pipeline.load_service, "cornballer")

        # Add an unknown active service type to the reservation configuration and re-register it
        test_pipeline.current_session = None
        test_reservation_config['active_services']['nonexistent_type'] = "nonexistent_service"
        test_session = session.Session(test_reservation_config, test_pipeline, self.command_parser)
        self.assertRaises(pipeline.ServiceInvalid, test_pipeline.register_session, test_session)

        # Add an unknown active service ID to the reservation configuration and re-register it
        test_pipeline.current_session = None
        test_reservation_config['active_services'].pop("nonexistent_type", None)
        test_reservation_config['active_services']['tracker'] = "nonexistent_service"
        test_session = session.Session(test_reservation_config, test_pipeline, self.command_parser)
        self.assertRaises(pipeline.ServiceInvalid, test_pipeline.register_session, test_session)

    # Load up a test schedule to work with
    schedule_update_deferred = self._load_test_schedule()
    schedule_update_deferred.addCallback(continue_test)
    return schedule_update_deferred
def test_run(self, strp_fun):
    """Exercise ReqMan's polling loop end to end: each message type must be
    routed to its matching handler exactly once."""
    socket = self.context.socket.return_value
    socks = ((socket, server.POLLIN), )
    poll_count = [0]

    def fake_poll(timeout):
        del timeout
        time.sleep(0.1)
        poll_count[0] += 1
        return socks

    # One message of each kind the manager knows how to dispatch.
    ping = MagicMock()
    ping.type = "ping"
    req = MagicMock()
    req.type = "request"
    req.data = {"type": "scanline",
                "satellite": MagicMock(),
                "utctime": MagicMock()}
    notice = MagicMock()
    notice.type = "notice"
    notice.data = {"type": "scanline"}
    unknown = MagicMock()
    messages = [ping, req, notice, unknown]

    def fake_message(*args, **kwargs):
        # Only messages parsed from a raw string cycle through the fixtures.
        if "rawstr" not in kwargs:
            return MagicMock()
        return messages[poll_count[0] % len(messages)]

    server.Message.side_effect = fake_message

    self.reqman.pong = MagicMock()
    self.reqman.notice = MagicMock()
    self.reqman.scanline = MagicMock()
    self.reqman.unknown = MagicMock()
    sys.modules["zmq"].Poller.return_value.poll.side_effect = fake_poll

    self.reqman.start()
    time.sleep(0.4)
    self.reqman.stop()
    self.reqman.join()

    self.reqman.pong.assert_called_once_with()
    self.reqman.notice.assert_called_once_with(notice)
    self.reqman.scanline.assert_called_once_with(req)
    self.reqman.unknown.assert_called_once_with(unknown)

    # Undo the module-level side effects so later tests see pristine mocks.
    sys.modules["zmq"].Poller.return_value.side_effect = None
    server.Message.side_effect = None
def test_run(self, strp_fun):
    """Run the ReqMan loop and check every message kind reaches its handler
    exactly once."""
    socket = self.context.socket.return_value
    socks = ((socket, server.POLLIN),)
    loop_count = [0]

    def fake_poll(timeout):
        del timeout
        time.sleep(0.1)
        loop_count[0] += 1
        return socks

    # Fixtures: one ping, one scanline request, one notice, one unknown.
    ping = MagicMock()
    ping.type = "ping"
    req = MagicMock()
    req.type = "request"
    req.data = {"type": "scanline",
                "satellite": MagicMock(),
                "utctime": MagicMock()}
    notice = MagicMock()
    notice.type = "notice"
    notice.data = {"type": "scanline"}
    unknown = MagicMock()
    fixtures = [ping, req, notice, unknown]

    def build_message(*args, **kwargs):
        # Raw-string parses cycle through the fixtures; everything else is a
        # throwaway mock.
        return (fixtures[loop_count[0] % len(fixtures)]
                if "rawstr" in kwargs else MagicMock())

    server.Message.side_effect = build_message

    self.reqman.pong = MagicMock()
    self.reqman.notice = MagicMock()
    self.reqman.scanline = MagicMock()
    self.reqman.unknown = MagicMock()
    sys.modules["zmq"].Poller.return_value.poll.side_effect = fake_poll

    self.reqman.start()
    time.sleep(0.4)
    self.reqman.stop()
    self.reqman.join()

    self.reqman.pong.assert_called_once_with()
    self.reqman.notice.assert_called_once_with(notice)
    self.reqman.scanline.assert_called_once_with(req)
    self.reqman.unknown.assert_called_once_with(unknown)

    # Reset shared mocks so subsequent tests start clean.
    sys.modules["zmq"].Poller.return_value.side_effect = None
    server.Message.side_effect = None
def test_session_termination(self):
    """ This test checks that the session coordinator can correctly clean up
    sessions once they expire (as determined by their reservation timestamp
    range).
    """

    # Load in some valid configuration and set the defaults using validate_configuration()
    self.config.read_configuration(self.source_data_directory + '/core/tests/data/test_config_basic.yml')
    self.config.read_configuration(self.source_data_directory + '/hardware/pipelines/tests/data/pipeline_configuration_valid.yml')
    self.config.validate_configuration()

    # Setup the pipeline manager
    test_pipelines = pipeline_manager.PipelineManager(self.device_manager, self.command_parser)

    # Create the expected mock services
    test_tracker_service = MagicMock()
    test_tracker_service.id = "sgp4"
    test_tracker_service.type = "tracker"
    test_pipelines.pipelines['test_pipeline3'].register_service(test_tracker_service)
    test_logger_service = MagicMock()
    test_logger_service.id = "basic"
    test_logger_service.type = "logger"
    test_pipelines.pipelines['test_pipeline3'].register_service(test_logger_service)

    # Setup the schedule manager
    test_schedule = schedule.ScheduleManager(self.source_data_directory + '/sessions/tests/data/test_schedule_valid.json')

    # Initialize the session coordinator
    session_coordinator = coordinator.SessionCoordinator(test_schedule, self.device_manager, test_pipelines, self.command_parser)

    def continue_test(loaded_schedule):
        # Activate the test reservation
        session_coordinator._check_for_new_reservations()
        # assertIn gives a clearer failure message than assertTrue(x in y)
        self.assertIn('RES.6', session_coordinator.active_sessions)
        res6 = session_coordinator.active_sessions['RES.6']
        res6.kill_session = MagicMock()

        # Change the expiration time on the reservation to make it expire
        res6.configuration['time_end'] = 1383264000

        # Kill the expired session
        session_coordinator._check_for_finished_sessions()
        res6.kill_session.assert_called_once_with()
        # Split the compound assertion so a failure pinpoints which half broke
        self.assertIn('RES.6', session_coordinator.closed_sessions)
        self.assertNotIn('RES.6', session_coordinator.active_sessions)

    # Update the schedule to load in the reservations
    schedule_update_deferred = test_schedule.update_schedule()
    schedule_update_deferred.addCallback(continue_test)
    return schedule_update_deferred
def test_session_startup_pipeline_setup_command_errors(self):
    """ Tests that the Session class correctly handles fatal pipeline setup
    command errors when starting a new session.
    """

    # First create a pipeline that contains invalid pipeline setup commands (to force an error)
    test_pipeline = pipeline.Pipeline(self.config.get('pipelines')[2], self.device_manager, self.command_parser)

    # Create the expected mock services
    test_tracker_service = MagicMock()
    test_tracker_service.id = "sgp4"
    test_tracker_service.type = "tracker"
    test_pipeline.register_service(test_tracker_service)
    test_logger_service = MagicMock()
    test_logger_service.id = "basic"
    test_logger_service.type = "logger"
    test_pipeline.register_service(test_logger_service)

    # Define a callback to check the results of the session start procedure
    def check_results(session_start_failure, test_session):
        # Check if the correct error was generated (caused by a failed pipeline setup command)
        # assertIsInstance reports the actual type on failure
        self.assertIsInstance(session_start_failure.value, parser.CommandFailed)

        # Make sure the session is not active
        self.assertFalse(test_session.is_active)

        # Make sure that the pipeline was freed after the error
        self.assertFalse(test_pipeline.is_active)
        for temp_device in test_pipeline.devices:
            # Try to lock the devices, if this fails then something wasn't unlocked correctly
            test_pipeline.devices[temp_device].reserve_device()

    # Define a callback to continue the test after the schedule has been loaded
    def continue_test(reservation_schedule):
        # Find the reservation that we want to test with
        test_reservation_config = self._load_reservation_config(reservation_schedule, 'RES.5')

        # Create a new session
        test_session = session.Session(test_reservation_config, test_pipeline, self.command_parser)

        # Start the session
        session_start_deferred = test_session.start_session()
        session_start_deferred.addErrback(check_results, test_session)
        return session_start_deferred

    # Now load up a test schedule to work with
    schedule_update_deferred = self._load_test_schedule()
    schedule_update_deferred.addCallback(continue_test)
    return schedule_update_deferred
def setUp(self):
    """Wire up a self-referential pygame replacement and instantiate the
    RGB-matrix variant of the Game."""
    pygame_stub = MagicMock()

    # One key press and one user event feed the mocked event queue.
    key_event = MagicMock()
    key_event.type = 1
    key_event.key = 1
    user_event = MagicMock()
    user_event.type = 3

    pygame_stub.get_size.return_value = (0, 0)
    pygame_stub.get.return_value = [key_event, user_event]
    pygame_stub.get_ticks.side_effect = [0, 6000, 0, 5000]
    pygame_stub.Font.return_value = pygame_stub
    pygame_stub.render.return_value = pygame_stub
    pygame_stub.KEYDOWN = 1
    pygame_stub.K_RETURN = 1
    pygame_stub.USEREVENT = 2
    # The stub stands in for every pygame submodule the game touches.
    pygame_stub.font = pygame_stub
    pygame_stub.display = pygame_stub
    pygame_stub.event = pygame_stub
    pygame_stub.time = pygame_stub

    self.game = Game(
        self.SWITCHES,
        self.COLUMNS,
        self.ROWS,
        self.SHAPES_NEXT_COUNT,
        self.FPS,
        self.COUNTDOWN,
        self.INTERVAL,
        self.SCORE_INCREMENTS,
        self.LEVEL_INCREMENT,
        self.INTERVAL_INCREMENT,
        self.RGB_MATRIX_HARDWARE,
        self.RGB_MATRIX_ROWS,
        self.RGB_MATRIX_CHAIN_LENGTH,
        self.RGB_MATRIX_PARALLEL,
        self.RGB_MATRIX_PWM_BITS,
        self.RGB_MATRIX_BRIGHTNESS,
        self.RGB_MATRIX_LSB_NANOSECONDS,
        self.RGB_MATRIX_LED_SLOWDOWN_GPIO,
        self.RGB_MATRIX_DISABLE_HARDWARE_PULSING,
        self.RGB_MATRIX_RGB_SEQUENCE,
        pygame_stub
    )
def test_post_new_attachment(self, mock_fields):
    """POSTing a new attachment should persist it and answer 201 with the
    attachment's location and JSON metadata."""
    request = DummyRequest(['/attachment'])
    request.method = 'POST'
    request.content = 'mocked'
    attachment_id = 'B5B4ED80AC3B894523D72E375DACAA2FC6606C18EDF680FE95903086C8B5E14A'

    uploaded = MagicMock()
    uploaded.value = 'some mocked value'
    uploaded.type = 'some mocked type'
    uploaded.filename = 'filename.txt'
    mock_fields.return_value = {'attachment': uploaded}
    when(self.mail_service).save_attachment(
        'some mocked value', 'some mocked type').thenReturn(defer.succeed(attachment_id))

    d = self.web.get(request)

    def assert_response(_):
        self.assertEqual(201, request.code)
        self.assertEqual('/attachment/%s' % attachment_id, request.headers['Location'])
        expected_json = {'ident': attachment_id,
                         'content-type': 'some mocked type',
                         'name': 'filename.txt',
                         'size': 17,
                         'encoding': 'base64'}
        self.assertEqual(expected_json, json.loads(request.written[0]))
        verify(self.mail_service).save_attachment('some mocked value', 'some mocked type')

    d.addCallback(assert_response)
    return d
def test_post_attachment_fails(self, mock_fields):
    """A failing save_attachment should produce a 500 with an error message
    in the body and no Location header."""
    request = DummyRequest(['/attachment'])
    request.method = 'POST'
    request.content = 'mocked'

    uploaded = MagicMock()
    uploaded.value = 'some mocked value'
    uploaded.type = 'some mocked type'
    mock_fields.return_value = {'attachment': uploaded}
    when(self.mail_service).save_attachment('some mocked value', 'some mocked type').thenReturn(
        defer.fail(Exception))

    d = self.web.get(request)

    def assert_response(_):
        self.assertEqual(500, request.code)
        self.assertFalse(
            request.responseHeaders.hasHeader('Location'.lower()))
        self.assertIn("message", json.loads(request.written[0]))
        verify(self.mail_service).save_attachment('some mocked value', 'some mocked type')

    d.addCallback(assert_response)
    return d
def test_walk_WalkModeNode(self):
    """walk_WalkModeNode must collapse each .value wrapper onto the node
    field, and leave fields that are already None untouched."""
    walker = GraphDSLNodeWalker(self.graphmgr)

    # Wrapped values get unwrapped in place.
    node = MagicMock()
    node.type.value = 'type'
    node.direction.value = 'dir'
    node.begin.value = 'begin'
    node.end.value = 'end'
    walker.walk_WalkModeNode(node, [])
    self.assertEqual(node.type, 'type')
    self.assertEqual(node.direction, 'dir')
    self.assertEqual(node.begin, 'begin')
    self.assertEqual(node.end, 'end')

    # Fields that are already None must stay None.
    node = MagicMock()
    node.type = None
    node.direction = None
    node.begin = None
    node.end = None
    walker.walk_WalkModeNode(node, [])
    self.assertIsNone(node.type)
    self.assertIsNone(node.direction)
    self.assertIsNone(node.begin)
    self.assertIsNone(node.end)
def test_reap_tmp_images(self, _os_datastore_path, _uuid):
    """ Test that stray images are found and deleted by the reaper """
    def _fake_ds_folder(datastore, folder):
        return "%s__%s" % (datastore, folder)

    ds = MagicMock()
    ds.id = "dsid"
    ds.type = DatastoreType.EXT3

    # In a random transient directory, set up a directory to act as the
    # tmp images folder and to contain a stray image folder with a file.
    tmpdir = file_util.mkdtemp(delete=True)
    tmp_images_folder = _fake_ds_folder(ds.id, TMP_IMAGE_FOLDER_NAME)
    tmp_images_dir = os.path.join(tmpdir, tmp_images_folder)
    tmp_image_dir = os.path.join(tmp_images_dir, "stray_image")
    os.mkdir(tmp_images_dir)
    os.mkdir(tmp_image_dir)
    (fd, path) = tempfile.mkstemp(prefix='strayimage_', dir=tmp_image_dir)
    # mkstemp returns an OPEN file descriptor; close it immediately so the
    # test does not leak a descriptor on every run.
    os.close(fd)
    self.assertTrue(os.path.exists(path))

    def _fake_os_datastore_path(datastore, folder):
        return os.path.join(tmpdir, _fake_ds_folder(datastore, folder))

    _os_datastore_path.side_effect = _fake_os_datastore_path

    ds_manager = MagicMock()
    ds_manager.get_datastores.return_value = [ds]
    image_manager = EsxImageManager(self.vim_client, ds_manager)
    image_manager.reap_tmp_images()

    # verify stray image is deleted
    self.assertFalse(os.path.exists(path))
def test_HostapdConfGenerator_getRadiusOptions():
    """Smoke test: build a RADIUS-enabled wifi interface config and print the
    generated hostapd RADIUS options."""
    iface_cfg = ConfigWifiIface()
    iface_cfg.ssid = "Paradrop"
    iface_cfg.maxassoc = 200
    iface_cfg.wmm = True
    iface_cfg.ifname = "wlan0"
    # Authentication and accounting point at the same RADIUS host/secret.
    iface_cfg.auth_server = "10.42.0.1"
    iface_cfg.auth_secret = "secret"
    iface_cfg.acct_server = "10.42.0.1"
    iface_cfg.acct_secret = "secret"

    device_cfg = MagicMock()
    device_cfg.country = "US"
    device_cfg.hwmode = "11a"
    device_cfg.channel = 36
    device_cfg.beacon_int = 100
    device_cfg.rts = -1
    device_cfg.frag = -1

    bridge_iface = MagicMock()
    bridge_iface.type = "bridge"
    bridge_iface.config_ifname = "br-lan"

    generator = HostapdConfGenerator(iface_cfg, device_cfg, bridge_iface)
    options = generator.getRadiusOptions()
    print(options)
def add_change(op, dns_name, rtype, ttl, identifier, weight):
    """Fake add_change: on CREATE, record a CNAME mock under *identifier*.

    dns_name, rtype and ttl are accepted for signature compatibility only.
    """
    if op == 'CREATE':
        record = MagicMock(weight=weight, identifier=identifier)
        # 'name' is special-cased by MagicMock's constructor, so assign it
        # after construction.
        record.name = "myapp.example.org."
        record.type = "CNAME"
        records[identifier] = record
    return MagicMock(name='change')
def test_HostapdConfGenerator_getMainOptions():
    """The main hostapd options must reflect the wifi interface, device and
    bridge configuration fed to the generator."""
    iface_cfg = MagicMock()
    iface_cfg.ssid = "Paradrop"
    iface_cfg.maxassoc = 200
    iface_cfg.wmm = True
    iface_cfg._ifname = "wlan0"

    device_cfg = MagicMock()
    device_cfg.country = "US"
    device_cfg.hwmode = "11a"
    device_cfg.channel = 36
    device_cfg.beacon_int = 100
    device_cfg.rts = -1
    device_cfg.frag = -1

    bridge_iface = MagicMock()
    bridge_iface.type = "bridge"
    bridge_iface.config_ifname = "br-lan"

    generator = HostapdConfGenerator(iface_cfg, device_cfg, bridge_iface)
    options = generator.getMainOptions()
    print(options)

    expected = [
        ("interface", "wlan0"),
        ("bridge", "br-lan"),
        ("ssid", "Paradrop"),
        ("country_code", "US"),
        ("ieee80211d", 1),
        ("hw_mode", "a"),
        ("beacon_int", 100),
        ("max_num_sta", 200),
        ("rts_threshold", -1),
        ("fragm_threshold", -1),
        ("wmm_enabled", 1),
    ]
    for option in expected:
        assert option in options
def _assert_correct_strategy(self, conversion_type, strategy_class,
                             expected_converter_type=None, and_also=None):
    """Shared assertion helper: determine the strategy for a column spec built
    around *conversion_type* and verify its class, field and converter."""
    # given: a column spec with a stub converter of undefined data type
    stub_converter = MagicMock('converter')
    stub_converter.type = MagicMock(return_value=DataType.UNDEFINED)
    column_spec = _mock_column_spec(field_name='product.product_id',
                                    main_category='product_type',
                                    converter=stub_converter,
                                    conversion_type=conversion_type)
    # when:
    strategy: CellConversion = conversion_strategy.determine_strategy(column_spec)
    # then:
    self.assertIsInstance(strategy, strategy_class)
    self.assertEqual('product.product_id', strategy.field)
    # and: either the stub converter is passed through unchanged, or it was
    # replaced by an instance of the expected converter type
    if expected_converter_type is None:
        self.assertEqual(stub_converter, strategy.converter)
    else:
        self.assertIsInstance(strategy.converter, expected_converter_type)
    # and: optional extra checks supplied by the caller
    if and_also is not None:
        and_also(strategy)
def test_delete_vm(self, stray_file, expected, islink, isdir):
    """Test deleting a VM

    Verifies delete_vm() resolves the VM's datastore, deletes the VM via the
    vim client, force-deletes the leftover VM directory, and logs both the
    info and warning messages. (The docstring was previously a no-op string
    literal in the middle of the function.)
    """
    datastore = MagicMock()
    datastore.id = "ds1"
    datastore.type = 1
    vm_resource = MagicMock()
    vm_resource.datastore = "ds1"
    datastores = [datastore]

    self.vm_manager.get_resource = MagicMock(return_value=vm_resource)
    self.vm_manager._ds_manager.get_datastores = MagicMock(
        return_value=datastores)
    self.vm_manager.vim_client.delete_vm = MagicMock(
        return_value="/vmfs/volumes/fake/vm_vm_foo/vm_foo")
    self.vm_manager._logger = MagicMock()
    self.vm_manager.vim_client.delete_file = MagicMock()

    with patch.object(os, "listdir", return_value=[stray_file]):
        self.vm_manager.delete_vm("vm_foo")

    self.vm_manager.get_resource.assert_called_once_with("vm_foo")
    self.vm_manager._ds_manager.get_datastores.assert_called_once_with()
    self.vm_manager.vim_client.delete_vm.assert_called_once_with(
        "vm_foo", False)
    self.vm_manager.vim_client.delete_file.assert_called_once_with(
        "/vmfs/volumes/fake/vm_vm_foo")
    self.vm_manager._logger.info.assert_has_calls(call(expected))
    self.vm_manager._logger.warning.assert_called_once_with(
        "Force delete vm directory /vmfs/volumes/fake/vm_vm_foo")
def test_socket_set_with_acceptable_socket(self):
    """_socket_set should accept an AF_INET/SOCK_STREAM socket, store it on
    the channel, and switch it to non-blocking mode."""
    sock = MagicMock()
    sock.family = socket.AF_INET
    sock.type = socket.SOCK_STREAM
    sock.setblocking = MagicMock()
    self.channel._socket_set(sock)
    # assertIs reports both operands on failure, unlike assertTrue(a is b)
    self.assertIs(self.channel._socket, sock)
    sock.setblocking.assert_called_once_with(False)
def test_hook_finished(self, m_log):
    # Verifies hook_finished() emits one per-file summary log line plus one
    # line per probe result, with different formats for antivirus and
    # non-antivirus probes.
    self.scan.date = "scan_date"
    self.fw.scan = self.scan
    self.fw.file.sha256 = "sha256"
    self.fw.name = "filename"
    self.fw.file.timestamp_first_scan = "ts_first_scan"
    self.fw.file.timestamp_last_scan = "ts_last_scan"
    self.fw.file.size = "size"
    # Two probe results: one antivirus, one metadata (non-antivirus).
    pr1, pr2 = MagicMock(), MagicMock()
    self.fw.probe_results = [pr1, pr2]
    pr1.name = "probe1"
    pr1.type = "antivirus"
    pr1.status = "status1"
    pr1.duration = "duration1"
    pr1.results = "results1"
    pr2.name = "probe2"
    pr2.type = "metadata"
    pr2.status = "status2"
    pr2.duration = None
    pr2.results = "results2"
    pr1.get_details.return_value = pr1
    pr2.get_details.return_value = pr2
    self.fw.hook_finished()
    # Expected per-file summary log call.
    expected1 = "[files_results] date: %s file_id: %s scan_id: %s "
    expected1 += "status: %s probes: %s submitter: %s submitter_id: %s"
    call1 = call(expected1, 'scan_date', self.fw.external_id,
                 self.fw.scan.external_id, 'Clean', 'probe1, probe2',
                 'unknown', 'undefined')
    # Expected antivirus-result log call for pr1.
    expected2 = '[av_results] date: %s av_name: "%s" '
    expected2 += "status: %d virus_name: \"%s\" file_id: %s "
    expected2 += "file_sha256: %s scan_id: %s duration: %f "
    expected2 += "submitter: %s submitter_id: %s"
    call2 = call(expected2, 'scan_date', 'probe1', 'status1', 'results1',
                 self.fw.external_id, 'sha256', self.fw.scan.external_id,
                 'duration1', 'unknown', 'undefined')
    # Expected generic probe-result log call for pr2; note the expected
    # duration argument is 0 while pr2.duration is None.
    expected3 = '[probe_results] date: %s name: "%s" '
    expected3 += "status: %d file_sha256: %s file_id: %s "
    expected3 += "duration: %f submitter: %s submitter_id: %s"
    call3 = call(expected3, 'scan_date', 'probe2', 'status2',
                 self.fw.external_id, 'sha256', 0, 'unknown', 'undefined')
    m_log.info.assert_has_calls([call1])
    m_log.info.assert_has_calls([call2])
    m_log.info.assert_has_calls([call3])
def helper_generate_service_info(self):
    """Build a mock service-info object mirroring this test case's attributes."""
    service_info = MagicMock()
    service_info.address = self.addr
    service_info.port = self.port
    service_info.name = self.name
    service_info.hostname = self.hostname
    service_info.type = self.type
    service_info.txt = self.txt
    return service_info
def test_custom_brain_fails(self):
    """Loading a custom brain whose model cannot be fetched from storage must
    raise NRPServicesGeneralException."""
    self.m_simconf.brain_model.is_custom = True
    brain_model = MagicMock()
    brain_model.name = 'model_brain'
    brain_model.path = 'brains'
    brain_model.type = 'brains/brain.zip'
    # Storage returns nothing for the model, which should be fatal.
    self.launcher._storageClient.get_model.return_value = None
    self.assertRaises(NRPServicesGeneralException, self.launcher._load_brain)
def helper_build_info(self):
    """Create a mock mDNS info record; the address is packed into 4-byte
    network form via inet_aton."""
    info = MagicMock()
    info.address = socket.inet_aton(self.address)
    info.port = self.port
    info.type = self.type
    info.name = self.name
    info.server = self.server
    info.properties = {}
    return info
def _get_vl_mock(self, plugin, plugin_instance, type, type_instance, host="MockHost", values=[]): vl = MagicMock() vl.plugin = plugin vl.plugin_instance = plugin_instance vl.type = type vl.type_instance = type_instance vl.host = host vl.values = values return vl
def _prepare_master_node(self):
    # Build a mocked "managed by master" relationship plus a
    # MockCloudifyContext carrying the definition/_api_mapping/options
    # properties that the kubernetes tasks under test expect, then install it
    # as the current cloudify context.
    node = MagicMock()
    node.properties = {
        'configuration': {
            'blueprint_file_name': 'kubernetes.conf'
        }
    }
    managed_master_node = MagicMock()
    managed_master_node.type = tasks.RELATIONSHIP_TYPE_MANAGED_BY_MASTER
    managed_master_node.target.node = node
    _ctx = MockCloudifyContext(
        node_id="test_id",
        node_name="test_name",
        deployment_id="test_name",
        properties={
            # Kubernetes resource definition used by the payload builders.
            'definition': {
                'apiVersion': 'v1',
                'metadata': 'c',
                'spec': 'd',
                'file': {}
            },
            # Maps CRUD operations onto API client/payload attribute names.
            '_api_mapping': {
                'create': {
                    'payload': 'api_payload_version',
                    'api': 'api_client_version',
                    'method': 'create'
                },
                'read': {
                    'api': 'api_client_version',
                    'method': 'read'
                },
                'delete': {
                    'api': 'api_client_version',
                    'method': 'delete'
                }
            },
            'options': {
                'first': 'second'
            }
        },
        runtime_properties={
            # Pre-existing resource state, as if the resource was created.
            'kubernetes': {
                'metadata': {
                    'name': "kubernetes_id"
                }
            }
        },
        relationships=[managed_master_node],
        operation={'retry_number': 0}
    )
    _ctx._node.type = 'cloudify.nodes.Root'
    current_ctx.set(_ctx)
    return managed_master_node, _ctx
def get_datastore_mock(self, datastores):
    """Translate (name, id, type, local) tuples into a list of datastore mocks."""
    mocks = []
    for entry in datastores:
        ds = MagicMock()
        # 'name' is special-cased by MagicMock's constructor, so it is
        # assigned as a plain attribute here.
        ds.name = entry[0]
        ds.id = entry[1]
        ds.type = entry[2]
        ds.local = entry[3]
        mocks.append(ds)
    return mocks
def test_get_model_successfully(self, mocked_get):
    """get_model should return the payload supplied by the mocked GET ('Test')."""
    storage_client = StorageClient.StorageClient()
    brain_model = MagicMock()
    brain_model.name = 'model_brain'
    brain_model.type = ResourceType.BRAIN
    response = storage_client.get_model("fakeToken", "fakeContextId", brain_model)
    self.assertEqual(response, 'Test')
def _prepare_master_node(self, api_mapping=None, external=False,
                         create=False):
    # Build a mocked "managed by master" relationship plus a
    # MockCloudifyContext for a Pod-typed node, then install it as the
    # current cloudify context.
    #
    # api_mapping: optional 'api_mapping' property to expose on the node.
    # external: value for the 'use_external_resource' node property.
    # create: when True, start with empty runtime properties (resource not
    #         yet created); otherwise pre-populate the kubernetes metadata.
    node = MagicMock()
    node.properties = {
        'configuration': {
            'blueprint_file_name': 'kubernetes.conf'
        }
    }
    managed_master_node = MagicMock()
    managed_master_node.type = RELATIONSHIP_TYPE_MANAGED_BY_MASTER
    managed_master_node.target.node = node
    properties = {
        'use_external_resource': external,
        'validate_resource_status': True,
        'definition': {
            'apiVersion': 'v1',
            'metadata': 'c',
            'spec': 'd'
        },
        'options': {
            'first': 'second'
        }
    }
    if api_mapping:
        properties['api_mapping'] = api_mapping
    _ctx = MockCloudifyContext(
        node_id="test_id",
        node_name="test_name",
        deployment_id="test_name",
        properties=properties,
        runtime_properties=DirtyTrackingDict(
            {} if create else {'kubernetes': {
                'metadata': {
                    'name': "kubernetes_id"
                }
            }}),
        relationships=[managed_master_node],
        operation={'retry_number': 0})
    # Full type hierarchy down to the Pod resource type.
    _ctx.node.type_hierarchy = \
        ['cloudify.nodes.Root',
         'cloudify.kubernetes.resources.ResourceBase',
         'cloudify.kubernetes.resources.BlueprintDefinedResource',
         'cloudify.kubernetes.resources.Pod']
    current_ctx.set(_ctx)
    return managed_master_node, _ctx
def test_generate_neurons(self):
    # _generate_neurons must render a population reference string for each
    # population-info type: entire population, slice (single index and
    # start/stop/step), and an explicit id list.
    neurons = MagicMock()
    neurons.name = "neuron_name"
    neurons.start = 0
    neurons.type = ExperimentPopulationInfo.TYPE_ENTIRE_POPULATION
    self.assertEqual(StructuredTransferFunction._generate_neurons(neurons),
                     "nrp.brain.neuron_name")
    neurons.type = ExperimentPopulationInfo.TYPE_POPULATION_SLICE
    # A slice of width one renders as a plain index.
    neurons.stop = neurons.start + 1
    self.assertEqual(StructuredTransferFunction._generate_neurons(neurons),
                     "nrp.brain.neuron_name[0]")
    # A wider, stepped slice renders as a slice() expression.
    neurons.stop = 100
    neurons.step = 5
    self.assertEqual(StructuredTransferFunction._generate_neurons(neurons),
                     "nrp.brain.neuron_name[slice(0,100,5)]")
    # A list view renders the ids as a literal list index.
    neurons.type = ExperimentPopulationInfo.TYPE_POPULATION_LISTVIEW
    neurons.ids = ['1', '2', '3']
    self.assertEqual(StructuredTransferFunction._generate_neurons(neurons),
                     "nrp.brain.neuron_name[[1, 2, 3]]")
def test_get_model_connection_error(self, mocked_get):
    """A ConnectionError from the HTTP layer must propagate out of get_model."""
    storage_client = StorageClient.StorageClient()
    mocked_get.side_effect = requests.exceptions.ConnectionError()
    brain_model = MagicMock()
    brain_model.name = 'model_brain'
    brain_model.type = ResourceType.BRAIN
    with self.assertRaises(requests.exceptions.ConnectionError) as context:
        storage_client.get_model("fakeToken", "fakeContextId", brain_model)
    self.assertEqual(requests.exceptions.ConnectionError, context.expected)
def test_get_custom_model_failed(self, mocked_get):
    """A 404 from the storage server should surface as an exception carrying
    a helpful message."""
    storage_client = StorageClient.StorageClient()
    brain_model = MagicMock()
    brain_model.name = 'model_brain'
    brain_model.type = ResourceType.BRAIN
    with self.assertRaises(Exception) as context:
        storage_client.get_model("fakeToken", "fakeContextId", brain_model)
    self.assertTrue(
        'Failed to communicate with the storage server, status code 404'
        in context.exception)
async def test_ignore_sieve(mock_db):
    # The ignore sieve must pass through raw IRC lines and the unignore
    # command, swallow events whose connection/channel/mask match an ignore
    # entry, and pass events from other connections.
    from plugins.core import ignore
    setup_db(mock_db)
    sess = mock_db.session()
    ignore.add_ignore(sess, 'testconn', '#chan', '*!*@host')
    _hook = MagicMock()
    bot = MagicMock()
    event = MagicMock()
    # Raw IRC events are never filtered.
    _hook.type = "irc_raw"
    assert (await ignore.ignore_sieve(bot, event, _hook)) is event
    # The unignore command must remain usable even for ignored users.
    _hook.type = "command"
    event.triggered_command = "unignore"
    assert (await ignore.ignore_sieve(bot, event, _hook)) is event
    # An event without a mask cannot be matched and passes through.
    event.triggered_command = "somecommand"
    event.mask = None
    assert (await ignore.ignore_sieve(bot, event, _hook)) is event
    # Matching connection, channel and mask: the event is swallowed.
    event.conn.name = "testconn"
    event.chan = "#chan"
    event.mask = "nick!user@host"
    assert (await ignore.ignore_sieve(bot, event, _hook)) is None
    # The same mask on a different connection is not ignored.
    event.conn.name = "testconn1"
    assert (await ignore.ignore_sieve(bot, event, _hook)) is event
def test_security(self):
    """ Does the set_security method get called correctly? """
    args = MagicMock()
    args.type = 'open'
    atheros_module = MagicMock()
    atheros_ap = MagicMock(spec=apcommand.accesspoints.atheros.AtherosAR5KAP)
    atheros_module.AtherosAR5KAP.return_value = atheros_ap
    # Make set_security blow up to confirm the sub-command survives AP errors.
    atheros_ap.set_security.side_effect = Exception('security setting error')
    with patch('apcommand.accesspoints.atheros', atheros_module):
        self.sub_command.security(args)
    atheros_ap.set_security.assert_called_with(security_type=args.type)
    return
def test_custom_brain_succeeds(self, mock_zip):
    """A custom brain zip fetched from storage should be extracted exactly once."""
    brain_model = MagicMock()
    brain_model.name = 'model_brain'
    brain_model.path = 'brains/brain.zip'
    # NOTE(review): 0x11000003 presumably encodes a zipped/custom brain
    # resource type — confirm against the resource-type constants.
    brain_model.type = 0x11000003
    self.launcher._storageClient.get_models.return_value = [brain_model]
    self.launcher._storageClient.get_model.return_value = r'awesome brain data'
    self.m_simconf.brain_model.zip_path.rel_path = 'brains/brain.zip'
    self.m_simconf.brain_model.zip_path.abs_path = '/my/experiment/brains/brain.zip'
    with patch("__builtin__.open", mock_open(read_data='bibi')):
        with patch(
                "hbp_nrp_cleserver.server.CLEGazeboSimulationAssembly.os"):
            self.launcher._extract_brain_zip()
    self.m_ziputil.extractall.assert_called_once()
def test_delete_vm_wrong_state(self, state):
    """Deleting a VM whose power state is *state* (not powered off) must raise
    VmPowerStateException."""
    datastore = MagicMock()
    datastore.id = "ds1"
    datastore.type = 1
    vm_resource = MagicMock()
    vm_resource.datastore = "ds1"

    self.vm_manager.get_resource = MagicMock(return_value=vm_resource)
    self.vm_manager._ds_manager.get_datastores = MagicMock(
        return_value=[datastore])

    # The vim client reports a VM whose runtime carries the given power state.
    power_runtime = MagicMock()
    power_runtime.powerState = state
    vm = MagicMock()
    vm.runtime = power_runtime
    self.vm_manager.vim_client.get_vm = MagicMock(return_value=vm)

    self.assertRaises(VmPowerStateException,
                      self.vm_manager.delete_vm, "vm_foo")
def test_create_type_mappings_first_error_then_custom(self, input_mock):
    """An invalid interactive answer is re-prompted before custom mappings are accepted."""
    self.handler.issue_types = MagicMock()

    # One ticket carrying two comma-separated types to map.
    ticket = MagicMock()
    ticket.type = "bug,ci"

    # The tracker offers a single "Story" issue type.
    story = MagicMock()
    story.name = "Story"
    self.handler.issue_types.return_value = [story]

    # Answers: invalid ("Error"), decline defaults ("N"),
    # then map both ticket types to "Story".
    input_mock.side_effect = ["Error", "N", "Story", "Story"]

    result = self.handler.create_type_mappings([ticket])

    self.assertEqual(result, {"bug": "Story", "ci": "Story"})
    # Four prompts: the bad answer plus the three accepted ones.
    self.assertEqual(input_mock.call_count, 4)
def test_hook_finished_submitter_id(self, m_log):
    """hook_finished logs file results and per-AV results, including the kiosk submitter_id."""
    self.scan.date = "scan_date"
    # Kiosk payload whose submitter_id must appear in both log lines.
    payload = {'submitter_id': "my_kiosk_id"}
    fw = module.FileKiosk(self.file, self.name, payload)
    fw.scan = self.scan
    fw.file.sha256 = "sha256"
    fw.name = "filename"
    fw.file.timestamp_first_scan = "ts_first_scan"
    fw.file.timestamp_last_scan = "ts_last_scan"
    fw.file.size = "size"
    # Single antivirus probe result; get_details returns the mock itself so
    # attribute access after detail expansion sees the same values.
    pr1 = MagicMock()
    fw.probe_results = [pr1]
    pr1.name = "probe1"
    pr1.type = "antivirus"
    pr1.status = "status1"
    pr1.duration = "duration1"
    pr1.results = "results1"
    pr1.get_details.return_value = pr1
    fw.hook_finished()
    # Expected [files_results] log format string (built in two parts).
    expected1 = "[files_results] date: %s file_id: %s scan_id: %s "
    expected1 += "status: %s probes: %s submitter: %s submitter_id: %s"
    call1 = call(expected1, 'scan_date', fw.external_id, fw.scan.external_id,
                 'Clean', 'probe1', 'kiosk', 'my_kiosk_id')
    # Expected [av_results] log format string for the antivirus probe.
    expected2 = '[av_results] date: %s av_name: "%s" '
    expected2 += "status: %d virus_name: \"%s\" file_id: %s "
    expected2 += "file_sha256: %s scan_id: %s duration: %f "
    expected2 += "submitter: %s submitter_id: %s"
    call2 = call(expected2, 'scan_date', 'probe1', 'status1', 'results1',
                 fw.external_id, 'sha256', fw.scan.external_id, 'duration1',
                 'kiosk', 'my_kiosk_id')
    # Both log lines must have been emitted via logger.info.
    m_log.info.assert_has_calls([call1])
    m_log.info.assert_has_calls([call2])
def _prepare_master_node(self, api_mapping=None):
    """Build a mocked managed-by-master relationship and a MockCloudifyContext for a Pod.

    :param api_mapping: optional custom API mapping stored under the
        'api_mapping' node property when provided.
    :return: (managed_master_node, ctx) tuple; ctx is also set as current.
    """
    # Target node of the managed-by-master relationship.
    master = MagicMock()
    master.properties = {
        'configuration': {
            'blueprint_file_name': 'kubernetes.conf'
        }
    }

    relationship = MagicMock()
    relationship.type = RELATIONSHIP_TYPE_MANAGED_BY_MASTER
    relationship.target.node = master

    node_properties = {
        'definition': {
            'apiVersion': 'v1',
            'metadata': 'c',
            'spec': 'd'
        },
        'options': {
            'first': 'second'
        }
    }
    if api_mapping:
        node_properties['api_mapping'] = api_mapping

    _ctx = MockCloudifyContext(
        node_id="test_id",
        node_name="test_name",
        deployment_id="test_name",
        properties=node_properties,
        runtime_properties={
            'kubernetes': {
                'metadata': {
                    'name': "kubernetes_id"
                }
            }
        },
        relationships=[relationship],
        operation={'retry_number': 0})
    _ctx._node.type = 'cloudify.kubernetes.resources.Pod'
    current_ctx.set(_ctx)

    return relationship, _ctx
def test_reap_tmp_images(self, _allow_grace_period, _os_datastore_path, _uuid):
    """ Test that stray images are found and deleted by the reaper """
    def _fake_ds_folder(datastore, folder):
        # Mirrors the datastore-relative folder layout used by the reaper.
        return "%s/%s" % (datastore, folder)

    ds = MagicMock()
    ds.id = "dsid"
    ds.type = DatastoreType.EXT3

    # In a random transient directory, set up a directory to act as the
    # tmp images folder and to contain a stray image folder with a file.
    tmpdir = file_util.mkdtemp(delete=True)
    tmp_ds_dir = os.path.join(tmpdir, ds.id)
    os.mkdir(tmp_ds_dir)
    tmp_image_dir = os.path.join(
        tmp_ds_dir,
        compond_path_join(TMP_IMAGE_FOLDER_NAME_PREFIX, "stray_image"))
    os.mkdir(tmp_image_dir)
    (fd, path) = tempfile.mkstemp(prefix='strayimage_', dir=tmp_image_dir)
    self.assertTrue(os.path.exists(path))

    def _fake_os_datastore_path(datastore, folder):
        # Redirect datastore path lookups into the transient directory.
        return os.path.join(tmpdir, _fake_ds_folder(datastore, folder))

    _os_datastore_path.side_effect = _fake_os_datastore_path

    ds_manager = MagicMock()
    ds_manager.get_datastores.return_value = [ds]
    image_manager = EsxImageManager(self.vim_client, ds_manager)
    if not _allow_grace_period:
        # Zero the grace period and wait so the stray image looks old
        # enough to be eligible for reaping.
        image_manager.REAP_TMP_IMAGES_GRACE_PERIOD = 0.0
        time.sleep(0.1)
    image_manager.reap_tmp_images()

    if _allow_grace_period:
        # verify stray image is not deleted due to grace period
        self.assertTrue(os.path.exists(path))
    else:
        # verify stray image is deleted
        self.assertFalse(os.path.exists(path))
def test_save_request_new_request_no_parent(self):
    """ case where the request is new and has no parent """
    # First fetchone: the SELECT finds no existing row (new request);
    # second fetchone: last_insert_rowid() returns the new id.
    fetchone_returns = [None, {'id': 42}]

    def fetchone_side_effect():
        result = fetchone_returns.pop(0)
        return result

    # Minimal request object: no parent, empty/None optional fields.
    request = MagicMock()
    request.parent_db_id = None
    request.type = "request type"
    request.method = "METHOD"
    request.url = "my url"
    request.referer = "some referrer"
    request.redirects = "some redirection"
    request.data = "some data"
    request.cookies = {}
    request.http_auth = None
    request.out_of_scope = False
    request.trigger = None
    request.user_output = []

    self.cursor_mock.fetchone.side_effect = fetchone_side_effect
    self.db.save_request(request)

    # Three statements: existence check, INSERT, and id retrieval.
    self.assertEqual(self.cursor_mock.execute.call_count, 3)
    self.assertEqual(
        self.cursor_mock.execute.call_args_list[0],
        call(
            'SELECT * FROM request WHERE type=? AND method=? AND url=? AND http_auth=? AND data=? AND trigger=?',
            ("request type", "METHOD", "my url", "", "some data", "")))
    # None/empty values are normalized to "" / 0 / "[]" for storage.
    self.assertEqual(
        self.cursor_mock.execute.call_args_list[1],
        call(
            'INSERT INTO request (id_parent, type, method, url, referer, redirects, data, cookies, http_auth, out_of_scope, trigger, user_output) VALUES (?,?,?,?,?,?,?,?,?,?,?,?)',
            (None, "request type", "METHOD", "my url", "some referrer",
             "some redirection", "some data", "[]", "", 0, "", "")))
    self.assertEqual(self.cursor_mock.execute.call_args_list[2],
                     call("SELECT last_insert_rowid() AS id"))
def test_reap_tmp_images(self, _allow_grace_period, _os_datastore_path, _uuid):
    """Stray tmp image folders are reaped once the grace period has elapsed."""
    ds = MagicMock()
    ds.id = "dsid"
    ds.type = DatastoreType.EXT3

    # Build a throwaway directory tree standing in for the datastore's tmp
    # images folder, containing one stray image directory with a file in it.
    scratch_root = file_util.mkdtemp(delete=True)
    datastore_dir = os.path.join(scratch_root, ds.id)
    os.mkdir(datastore_dir)
    stray_dir = os.path.join(
        datastore_dir,
        compond_path_join(TMP_IMAGE_FOLDER_NAME_PREFIX, "stray_image"))
    os.mkdir(stray_dir)
    (fd, stray_path) = tempfile.mkstemp(prefix='strayimage_', dir=stray_dir)
    self.assertTrue(os.path.exists(stray_path))

    # Redirect datastore path resolution into the scratch tree.
    _os_datastore_path.side_effect = (
        lambda datastore, folder:
            os.path.join(scratch_root, "%s/%s" % (datastore, folder)))

    fake_ds_manager = MagicMock()
    fake_ds_manager.get_datastores.return_value = [ds]
    image_manager = EsxImageManager(self.vim_client, fake_ds_manager)
    if not _allow_grace_period:
        # Zero the grace period and wait so the stray image looks old enough.
        image_manager.REAP_TMP_IMAGES_GRACE_PERIOD = 0.0
        time.sleep(0.1)
    image_manager.reap_tmp_images()

    if _allow_grace_period:
        # Inside the grace period the stray image must survive.
        self.assertTrue(os.path.exists(stray_path))
    else:
        # With no grace period the reaper must have removed it.
        self.assertFalse(os.path.exists(stray_path))
def test_post_attachment_fails(self, mock_fields):
    """A failing attachment save yields a 500 with a JSON error body and no Location header."""
    request = DummyRequest(['/attachment'])
    request.method = 'POST'
    request.content = 'mocked'

    uploaded = MagicMock()
    uploaded.value = 'some mocked value'
    uploaded.type = 'some mocked type'
    mock_fields.return_value = {'attachment': uploaded}

    # The mail service rejects the attachment.
    when(self.mail_service).save_attachment(
        'some mocked value', 'some mocked type').thenReturn(defer.fail(Exception))

    d = self.web.get(request)

    def assert_response(_):
        self.assertEqual(500, request.code)
        self.assertFalse(request.responseHeaders.hasHeader('Location'.lower()))
        # The error body carries a message field.
        self.assertIn("message", json.loads(request.written[0]))
        verify(self.mail_service).save_attachment(
            'some mocked value', 'some mocked type')

    d.addCallback(assert_response)
    return d
def test_post_new_attachment(self, mock_fields):
    """Posting a new attachment returns 201 with a Location header and metadata JSON."""
    request = DummyRequest(['/attachment'])
    request.method = 'POST'
    request.content = 'mocked'
    attachment_id = 'B5B4ED80AC3B894523D72E375DACAA2FC6606C18EDF680FE95903086C8B5E14A'

    uploaded = MagicMock()
    uploaded.value = 'some mocked value'
    uploaded.type = 'some mocked type'
    uploaded.filename = 'filename.txt'
    mock_fields.return_value = {'attachment': uploaded}

    # The mail service accepts the attachment and reports its ident.
    when(self.mail_service).save_attachment(
        'some mocked value', 'some mocked type').thenReturn(defer.succeed(attachment_id))

    d = self.web.get(request)

    def assert_response(_):
        self.assertEqual(201, request.code)
        self.assertEqual('/attachment/%s' % attachment_id,
                         request.responseHeaders.getRawHeaders("location")[0])
        expected_json = {'ident': attachment_id,
                         'content-type': 'some mocked type',
                         'name': 'filename.txt',
                         'size': 17,
                         'encoding': 'base64'}
        self.assertEqual(expected_json, json.loads(request.written[0]))
        verify(self.mail_service).save_attachment(
            'some mocked value', 'some mocked type')

    d.addCallback(assert_response)
    return d
def test_delete_vm(self, stray_file, expected, islink, isdir):
    """Test deleting a VM"""
    # One registered datastore matching the VM's placement.
    fake_datastore = MagicMock()
    fake_datastore.id = "ds1"
    fake_datastore.type = 1

    fake_vm_resource = MagicMock()
    fake_vm_resource.datastore = "ds1"

    self.vm_manager.get_resource = MagicMock(return_value=fake_vm_resource)
    self.vm_manager._ds_manager.get_datastores = MagicMock(
        return_value=[fake_datastore])
    self.vm_manager.vim_client.delete_vm = MagicMock(
        return_value="/vmfs/volumes/fake/vm_vm_foo/vm_foo")
    self.vm_manager._logger = MagicMock()
    self.vm_manager.vim_client.delete_file = MagicMock()

    # The VM directory still holds a (parameterized) stray file after deletion.
    with patch.object(os, "listdir", return_value=[stray_file]):
        self.vm_manager.delete_vm("vm_foo")

    self.vm_manager.get_resource.assert_called_once_with("vm_foo")
    self.vm_manager._ds_manager.get_datastores.assert_called_once_with()
    self.vm_manager.vim_client.delete_vm.assert_called_once_with("vm_foo", False)
    self.vm_manager.vim_client.delete_file.assert_called_once_with(
        "/vmfs/volumes/fake/vm_vm_foo")
    self.vm_manager._logger.info.assert_has_calls(call(expected))
    self.vm_manager._logger.warning.assert_called_once_with(
        "Force delete vm directory /vmfs/volumes/fake/vm_vm_foo")
def test_status_2(self):
    """A missing antivirus probe status leaves the overall file status as None."""
    pending = MagicMock()
    pending.type = IrmaProbeType.antivirus
    pending.status = None
    finished = MagicMock()
    finished.type = IrmaProbeType.antivirus
    finished.status = 1
    self.fw.probe_results = [pending, finished]
    self.assertEqual(self.fw.status, None)
def test_reservation_session_creation(self):
    """ This test verifies that the session coordinator can correctly create usage sessions from
    the reservation schedule. It also tests that it correctly handles conflicting reservations.
    That is, reservations that concurrently use the same pipeline or hardware devices. Finally,
    it also tests that the session coordinator can return a list of a given user's active
    sessions.
    """
    # Load in some valid configuration and set the defaults using validate_configuration()
    self.config.read_configuration(self.source_data_directory+'/core/tests/data/test_config_basic.yml')
    self.config.read_configuration(self.source_data_directory+'/hardware/pipelines/tests/data/pipeline_configuration_valid.yml')
    self.config.validate_configuration()

    # Setup the pipeline manager
    test_pipelines = pipeline_manager.PipelineManager(self.device_manager, self.command_parser)

    # Create the expected mock services required by the test pipeline
    test_tracker_service = MagicMock()
    test_tracker_service.id = "sgp4"
    test_tracker_service.type = "tracker"
    test_pipelines.pipelines['test_pipeline3'].register_service(test_tracker_service)
    test_logger_service = MagicMock()
    test_logger_service.id = "basic"
    test_logger_service.type = "logger"
    test_pipelines.pipelines['test_pipeline3'].register_service(test_logger_service)

    # Setup the schedule manager
    test_schedule = schedule.ScheduleManager(self.source_data_directory+'/sessions/tests/data/test_schedule_valid.json')

    # Initialize the session coordinator
    session_coordinator = coordinator.SessionCoordinator(test_schedule, self.device_manager, test_pipelines, self.command_parser)

    # Define an inline callback to resume execution after the schedule has been loaded
    def continue_test(loaded_schedule):
        # Try loading the user's reservations before they have been activated
        active_sessions = session_coordinator.load_user_sessions(1)
        self.assertTrue(len(active_sessions)==0)

        # Look for active reservations and create associated sessions
        session_coordinator._check_for_new_reservations()

        # Make sure that RES.5 failed (it uses a pipeline that contains errors in its setup commands)
        self.assertTrue('RES.5' in session_coordinator.closed_sessions and
                        'RES.5' not in session_coordinator.active_sessions,
                        "RES.5, which uses a pipeline that contains fatal setup command errors, was not marked as "+
                        "closed as expected.")

        # Verify that either RES.2 or RES.3 is active (these reservations use the same pipeline at
        # the same time, so only one can be active at a time)
        self.assertTrue((('RES.2' in session_coordinator.active_sessions) and
                         ('RES.3' in session_coordinator.closed_sessions)) or
                        (('RES.2' in session_coordinator.closed_sessions) and
                         ('RES.3' in session_coordinator.active_sessions)),
                        "The conflicting active reservations (RES.2 or RES.3) defined in the test schedule aren't where "+
                        "they should be (one should be active and one should be closed due to the conflict error).")

        # Verify that RES.4 didn't get started (uses an invalid pipeline)
        self.assertTrue(('RES.4' not in session_coordinator.active_sessions) and
                        ('RES.4' in session_coordinator.closed_sessions),
                        "RES.4, which uses a non-existent pipeline, was not marked closed as expected.")

        # Verify that the expired reservation (RES.1) was not started
        self.assertTrue(('RES.1' not in session_coordinator.active_sessions),
                        "RES.1, which is expired, was started when it should have been ignored.")

        # Attempt to reserve the pipeline that RES.2 or RES.3 is using (tests that it was correctly locked)
        self.assertRaises(pipeline.PipelineInUse, test_pipelines.pipelines['test_pipeline'].reserve_pipeline)

        # Load test_admin's active sessions (either RES.2 or RES.3 and RES.6)
        active_sessions = session_coordinator.load_user_sessions("1")
        self.assertTrue(len(active_sessions)==2)
        self.assertTrue(active_sessions[0].id=="RES.2" or active_sessions[0].id=="RES.3" or
                        active_sessions[0].id=="RES.6")
        self.assertTrue(active_sessions[1].id=="RES.2" or active_sessions[1].id=="RES.3" or
                        active_sessions[1].id=="RES.6")

    # Update the schedule to load in the reservations; continue_test fires once loading completes
    schedule_update_deferred = test_schedule.update_schedule()
    schedule_update_deferred.addCallback(continue_test)

    return schedule_update_deferred
def test_socket_set_with_unsupported_type(self):
    """_socket_set raises ValueError for a socket with an unsupported type value."""
    bogus_socket = MagicMock()
    bogus_socket.family = socket.AF_INET
    bogus_socket.type = 9001  # not a recognized socket type constant
    self.assertRaises(ValueError, self.channel._socket_set, bogus_socket)