def test_run_auth_workflow_impicit_input(self):
    """run_as_workflow with implicit terminal_auth input.

    A ctx of unknown type turns the SSH failure into
    NonRecoverableError; a 'deployment' ctx turns it into
    OperationRetry and the connect call receives the auth values.

    NOTE(review): "impicit" in the test name looks like a typo for
    "implicit" -- confirm before renaming (runners discover by name).
    """
    # wrong context type
    _ctx = Mock()
    _ctx.type = '<unknown>'
    ssh_mock = Mock()
    ssh_mock.connect = Mock(side_effect=OSError("e"))
    with patch("paramiko.SSHClient", Mock(return_value=ssh_mock)):
        with self.assertRaises(NonRecoverableError):
            tasks.run_as_workflow(
                {}, ctx=_ctx, calls=[{'action': 'ls'}],
                logger_file="/tmp/terminal.log",
                terminal_auth=json.loads(json.dumps(
                    {'ip': 'ip', 'user': '******',
                     'password': '******'})))
    # correct context type
    _ctx = Mock()
    _ctx.type = 'deployment'
    ssh_mock = Mock()
    ssh_mock.connect = Mock(side_effect=OSError("e"))
    with patch("paramiko.SSHClient", Mock(return_value=ssh_mock)):
        with self.assertRaises(OperationRetry):
            tasks.run_as_workflow(
                {}, ctx=_ctx, calls=[{'action': 'ls'}],
                logger_file="/tmp/terminal.log",
                terminal_auth=json.loads(json.dumps(
                    {'ip': 'ip', 'user': '******',
                     'password': '******'})))
    # the auth dict values were passed straight through to connect
    ssh_mock.connect.assert_called_with(
        'ip', allow_agent=False, look_for_keys=False,
        password='******', port=22, timeout=5, username='******')
def test_tree_type_checker():
    """tree_type_checker accepts a single type, several type args, or
    one iterable of types; NeuriteType.all matches any object."""
    # check that when NeuriteType.all, we accept all trees, w/o checking type
    tree_filter = tree_type_checker(NeuriteType.all)
    assert tree_filter('fake_tree')
    mock_tree = Mock()
    mock_tree.type = NeuriteType.axon
    # single arg
    tree_filter = tree_type_checker(NeuriteType.axon)
    assert tree_filter(mock_tree)
    mock_tree.type = NeuriteType.basal_dendrite
    assert not tree_filter(mock_tree)
    # multiple args
    tree_filter = tree_type_checker(NeuriteType.axon,
                                    NeuriteType.basal_dendrite)
    assert tree_filter(mock_tree)
    # a single tuple argument behaves like multiple args
    tree_filter = tree_type_checker(
        (NeuriteType.axon, NeuriteType.basal_dendrite))
    assert tree_filter(mock_tree)
    # NEURITES presumably includes NeuriteType.all, so anything matches
    tree_filter = tree_type_checker(*NEURITES)
    assert tree_filter('fake_tree')
    assert tree_filter(mock_tree)
    tree_filter = tree_type_checker(NEURITES)
    assert tree_filter('fake_tree')
    assert tree_filter(mock_tree)
def get_git_tree_mock(commit_sha, recursive=False):
    """Fake a git tree lookup.

    The known sha 'aaaaaaa' yields three entries (two blobs named
    Dockerfile at different paths plus one non-blob); any other sha
    yields an empty tree.
    """
    entries = []
    for entry_type, entry_path in (
            ('blob', 'Dockerfile'),
            ('other', '/some/Dockerfile'),
            ('blob', 'somesubdir/Dockerfile')):
        entry = Mock()
        entry.type = entry_type
        entry.path = entry_path
        entries.append(entry)
    tree = Mock()
    tree.tree = entries if commit_sha == 'aaaaaaa' else []
    return tree
def get_git_tree_mock(commit_sha, recursive=False):
    """Fake a git tree lookup: the known sha gets three entries, any
    other sha gets an empty tree."""
    def make_entry(kind, path):
        # one mocked tree entry with the attributes the code under
        # test reads
        entry = Mock()
        entry.type = kind
        entry.path = path
        return entry

    tree = Mock()
    if commit_sha == "aaaaaaa":
        tree.tree = [
            make_entry("blob", "Dockerfile"),
            make_entry("other", "/some/Dockerfile"),
            make_entry("blob", "somesubdir/Dockerfile"),
        ]
    else:
        tree.tree = []
    return tree
def test_delete_fallback_no_type(self):
    """An event whose detail-type matches no typed processor falls back
    to the source-only (type=None) processor, whose resource id is then
    deleted from the table."""
    event_source = random_string()
    detail_type = random_string()
    event = {'source': event_source, 'detail-type': detail_type}
    # fallback processor: matches on source, has no detail-type
    mock_default_processor = Mock()
    mock_default_processor.source = event_source
    mock_default_processor.type = None
    mock_default_processor.test_and_get_id = MagicMock(
        return_value=self.resource_id)
    # typed processor whose type deliberately never matches the event;
    # invoking it would fail the test via the side_effect
    mock_type_processor = Mock()
    mock_type_processor.source = event_source
    mock_type_processor.type = detail_type + random_string()
    mock_type_processor.test_and_get_id = MagicMock(
        side_effect=Exception('Wrong processor invoked'))
    mock_table = create_non_fung_table(self.table_name, self.index_name)
    self._insert_token(mock_table)
    self.assertEqual(1, self._get_resource_id_count(mock_table))
    manager = EventProcessorManager(
        table_name=self.table_name,
        index_name=self.index_name,
        processors=[mock_default_processor, mock_type_processor])
    manager._table = mock_table
    manager.process_event(event)
    # the fallback processor ran and its resource id was removed
    self.assertEqual(0, self._get_resource_id_count(mock_table))
def test_tree_type_checker_broken():
    """NeuriteType.all must not match soma or undefined trees."""
    tree_filter = tree_type_checker(NeuriteType.all)
    mock_tree = Mock()
    mock_tree.type = NeuriteType.soma
    nt.ok_(not tree_filter(mock_tree))
    mock_tree.type = NeuriteType.undefined
    nt.ok_(not tree_filter(mock_tree))
def test_run_workflow_skip_uncofigured(self):
    """run_workflow logs an error and skips when the deployment id or
    the workflow name is missing from the inputs.

    NOTE(review): "uncofigured" looks like a typo for "unconfigured"
    -- confirm before renaming.
    """
    # no settings
    _ctx = Mock()
    _ctx.type = 'deployment'
    tasks.run_workflow(inputs={}, ctx=_ctx)
    _ctx.logger.error.assert_called_with("Deployment id is undefined")
    # deployment id supplied but no workflow configured
    _ctx = Mock()
    _ctx.type = 'deployment'
    tasks.run_workflow(inputs={'deployment_id': 'w_id'}, ctx=_ctx)
    _ctx.logger.error.assert_called_with("Workflow for run is undefined")
def test_type_filters():
    """axon_filter matches only axons; dendrite_filter matches both
    basal and apical dendrites (nose-style assertions)."""
    axon = Mock()
    axon.type = NeuriteType.axon
    nt.ok_(axon_filter(axon))
    nt.ok_(not dendrite_filter(axon))
    basal_dendrite = Mock()
    basal_dendrite.type = NeuriteType.basal_dendrite
    nt.ok_(not axon_filter(basal_dendrite))
    nt.ok_(dendrite_filter(basal_dendrite))
    apical_dendrite = Mock()
    apical_dendrite.type = NeuriteType.apical_dendrite
    nt.ok_(not axon_filter(apical_dendrite))
    nt.ok_(dendrite_filter(apical_dendrite))
def test_type_filters():
    """axon_filter matches only axons; dendrite_filter matches both
    basal and apical dendrites."""
    axon = Mock()
    axon.type = NeuriteType.axon
    assert axon_filter(axon)
    assert not dendrite_filter(axon)
    basal_dendrite = Mock()
    basal_dendrite.type = NeuriteType.basal_dendrite
    assert not axon_filter(basal_dendrite)
    assert dendrite_filter(basal_dendrite)
    apical_dendrite = Mock()
    apical_dendrite.type = NeuriteType.apical_dendrite
    assert not axon_filter(apical_dendrite)
    assert dendrite_filter(apical_dendrite)
def test_tree_type_checker_broken():
    """Filters built from explicit neurite types must reject soma."""
    tree_filter = tree_type_checker(NeuriteType.all)
    assert tree_filter('fake_tree')
    mock_tree = Mock()
    mock_tree.type = NeuriteType.axon
    tree_filter = tree_type_checker(*NEURITES)
    assert tree_filter(mock_tree)
    # an explicit list of types (no NeuriteType.all) rejects soma
    tree_filter = tree_type_checker(NeuriteType.axon,
                                    NeuriteType.apical_dendrite,
                                    NeuriteType.basal_dendrite)
    mock_tree.type = NeuriteType.soma
    assert not tree_filter(mock_tree)
def test_no_listed_version_reviewer(self):
    """A plain reviewer passes the view-level check but fails the
    object-level check on an add-on with no listed versions."""
    user = user_factory()
    self.grant_permission(user, 'Addons:Review')
    obj = Mock(spec=[])
    obj.type = amo.ADDON_EXTENSION
    obj.has_listed_versions = lambda: False
    for method in self.safe_methods:
        request = getattr(self.request_factory, method)('/')
        request.user = user
        # When not checking the object, we have permission because we're
        # authenticated.
        assert self.permission.has_permission(request, myview)
        # It doesn't work with the object though, since
        # has_listed_versions() is returning False, we don't have enough
        # permissions, being a "simple" reviewer.
        assert not self.permission.has_object_permission(
            request, myview, obj)
    for method in self.unsafe_methods:
        request = getattr(self.request_factory, method)('/')
        request.user = user
        # When not checking the object, we have permission because we're
        # authenticated.
        assert self.permission.has_permission(request, myview)
        # As above it doesn't work with the object though.
        assert not self.permission.has_object_permission(
            request, myview, obj)
def test_with_app_and_items(self):
    """hub_breadcrumbs with extra (url, text) items appends them after
    the app's own crumb in the developer breadcrumb trail."""
    product = Mock()
    product.name = 'Steamcube'
    product.id = 9999
    product.app_slug = 'scube'
    product.type = amo.ADDON_WEBAPP
    product.get_dev_url.return_value = reverse('mkt.developers.apps.edit',
                                               args=[product.app_slug])
    s = render(
        """{{ hub_breadcrumbs(product, items=[('/foo', 'foo'), ('/bar', 'bar')]) }}""",
        {
            'request': self.request,
            'product': product
        })
    crumbs = pq(s)('li')
    # full trail: site, developers hub, dashboard, app, then the extras
    expected = [
        ('Home', reverse('home')),
        ('Developers', reverse('ecosystem.landing')),
        ('My Submissions', reverse('mkt.developers.apps')),
        ('Steamcube', product.get_dev_url()),
        ('foo', '/foo'),
        ('bar', '/bar'),
    ]
    amo.tests.check_links(expected, crumbs, verify=False)
def test_get_payload_ignores_registration(self, Dhis2SettingsPatch,
                                          cases_referenced_by_xform,
                                          push_case):
    """ get_payload should raise IgnoreDocument given a registration form """
    case_mock = Mock()
    case_mock.type = CASE_TYPE
    cases_referenced_by_xform.return_value = [case_mock]

    class Settings(object):
        # minimal stand-in for the dhis2 settings object
        dhis2 = {
            'host': 'foo',
            'username': '******',
            'password': '******',
            'top_org_unit_name': 'foo'
        }

    Dhis2SettingsPatch.for_domain.return_value = Settings()
    # the form reports the child-registration XMLNS
    form_mock = MagicMock()
    form_mock.__getitem__.return_value = REGISTER_CHILD_XMLNS
    payload_generator = FormRepeaterDhis2EventPayloadGenerator(None)
    with self.assertRaises(IgnoreDocument):
        payload_generator.get_payload(None, form_mock)
def test_onError(self, log):
    """onError converts an error into exactly one event carrying
    ZenEventClasses.Error severity."""
    error = Mock()
    error.type = "test"
    e = self.plugin.onError(error, sentinel.anything)
    # assertEquals is a deprecated alias removed in Python 3.12;
    # use assertEqual instead
    self.assertEqual(len(e['events']), 1)
    self.assertEqual(e['events'][0]['severity'], ZenEventClasses.Error)
def test_fsync(self):
    """fsync on an open read/write handle delegates to the cache file
    handle's fsync exactly once."""
    file = "/dir1/dir2/file"
    testfs = vofs.VOFS(self.testMountPoint, self.testCacheDir, opt)
    testfs.client = Object()
    # a non-locked data node of known length/MD5
    node = Mock(spec=vos.Node)
    node.isdir = Mock(return_value=False)
    node.props = Object
    node.props.get = Mock(side_effect=SideEffect({
        ('islocked', False): False,
        ('length',): 10,
        ('MD5',): 12354,
    }, name="node.props.get"))
    node.type = "vos:DataNode"
    node.name = "testNode"
    testfs.client.get_node = Mock(return_value=node)
    with patch('vos.CadcCache.FileHandle') as mockFileHandle:
        mockFileHandle.return_value = MyFileHandle(
            file, testfs.cache, None)
        fh = testfs.open(file, os.O_RDWR | os.O_CREAT, None)
        # wrap fsync so we can count the delegated calls
        HandleWrapper.file_handle(fh).cache_file_handle.fsync = \
            Mock(wraps=HandleWrapper.file_handle(
                fh).cache_file_handle.fsync)
        testfs.fsync(file, False, fh)
        # fix: the original asserted assert_called_once_with twice;
        # the duplicate was redundant and has been removed
        HandleWrapper.file_handle(
            fh).cache_file_handle.fsync.assert_called_once_with()
def test_run_workflow_run_filter(self):
    """When the deployment's capabilities pass filter_by, run_workflow
    starts the configured workflow with its params."""
    _ctx = Mock()
    _ctx.type = 'deployment'
    fake_client = Mock()
    fake_client.deployments.get = Mock(return_value={
        'id': 'id',
        'capabilities': {
            'autouninstall': {
                'value': True
            }
        }
    })
    mock_manager = Mock()
    mock_manager.get_rest_client = Mock(return_value=fake_client)
    with patch('cloudify_hooks_workflow.tasks.manager', mock_manager):
        tasks.run_workflow(
            inputs={'deployment_id': 'w_id'},
            workflow_for_run="uninstall",
            workflow_params={'force': True},
            # passes because capabilities.autouninstall.value is True
            filter_by=[{
                "path": ["deployment_capabilities", "autouninstall",
                         "value"],
                "values": [True, "yes"]
            }],
            ctx=_ctx)
    # the uninstall workflow was started with the given params
    fake_client.executions.start.assert_called_with(
        deployment_id='w_id', workflow_id='uninstall', force=True)
def test_create(self, mock_build, *args):
    """Subnetwork create inserts a subnetwork using the related
    network's selfLink and the given CIDR."""
    rel = Mock()
    rel.type = 'cloudify.gcp.relationships.contained_in_network'
    rel.target.node.type = 'cloudify.gcp.nodes.Network'
    rel.target.node.properties = {
        'auto_subnets': False,
    }
    rel.target.instance.runtime_properties = {
        'selfLink': 'Look at me!',
    }
    self.ctxmock.instance.relationships.append(rel)
    self.ctxmock.node.properties.update({
        'use_external_resource': False,
        'subnet': 'Toki Tori',
    })
    subnetwork.create(
        name='subnet name',
        region='Bukit Bintang',
        subnet='Token Ring',
    )
    mock_build().subnetworks().insert.assert_called_once_with(
        body={
            'ipCidrRange': 'Token Ring',
            'network': 'Look at me!',
            # NOTE(review): expects 'subnet name' to be normalized to
            # 'subnetname' by the code under test -- confirm intended
            'name': 'subnetname',
            'description': 'Cloudify generated subnetwork',
        },
        project='not really a project',
        region='Bukit Bintang',
    )
def test_fsync(self):
    """fsync on an open read/write handle delegates to the cache file
    handle's fsync exactly once (patches vofs.CadcCache.FileHandle)."""
    file = "/dir1/dir2/file"
    testfs = vofs.VOFS(self.testMountPoint, self.testCacheDir, opt)
    testfs.client = Object()
    # a non-locked data node of known length/MD5
    node = Mock(spec=vos.Node)
    node.isdir = Mock(return_value=False)
    node.props = Object
    node.props.get = Mock(side_effect=SideEffect({
        ('islocked', False): False,
        ('length',): 10,
        ('MD5',): 12354,
    }, name="node.props.get"))
    node.type = "vos:DataNode"
    node.name = "testNode"
    testfs.client.get_node = Mock(return_value=node)
    with patch('vofs.CadcCache.FileHandle') as mockFileHandle:
        mockFileHandle.return_value = MyFileHandle(
            file, testfs.cache, None)
        fh = testfs.open(file, os.O_RDWR | os.O_CREAT, None)
        # wrap fsync so we can count the delegated calls
        HandleWrapper.file_handle(fh).cache_file_handle.fsync = \
            Mock(wraps=HandleWrapper.file_handle(
                fh).cache_file_handle.fsync)
        testfs.fsync(file, False, fh)
        # fix: the original asserted assert_called_once_with twice;
        # the duplicate was redundant and has been removed
        HandleWrapper.file_handle(
            fh).cache_file_handle.fsync.assert_called_once_with()
def getDisk(self, disk_type, primary_count=0, has_extended=False,
            logical_count=0):
    """ Return a mock representing a parted.Disk.

    The primary/extended/logical limits are looked up from
    disklabel_types for the given disk label type.
    """
    max_primaries, supports_extended, max_logicals = \
        disklabel_types[disk_type]
    disk = Mock()
    disk.type = disk_type
    # primary partitions
    disk.primaryPartitionCount = primary_count
    disk.maxPrimaryPartitionCount = max_primaries
    # extended partitions
    disk.supportsFeature = Mock(return_value=supports_extended)
    disk.getExtendedPartition = Mock(return_value=has_extended)
    # logical partitions
    disk.getMaxLogicalPartitions = Mock(return_value=max_logicals)
    disk.getLogicalPartitions = Mock(return_value=[0] * logical_count)
    return disk
def test_create_with_instance(self, mock_build, *args):
    """A dns_record connected to an instance resolves the record data
    to the instance's external NAT IP."""
    mock_build().changes().create().execute.side_effect = [
        {
            'status': 'done'
        },
    ]
    rel = Mock()
    rel.type = ('cloudify.gcp.relationships.'
                'dns_record_connected_to_instance')
    rel.target.instance.runtime_properties = {
        'kind': 'compute#instance',
        'networkInterfaces': [{
            'accessConfigs': [{
                'natIP': 'intellectual property',
            }]
        }],
    }
    # rel_target is the Instance instance context. The instance itself must
    # be connected to an external IP, so we need to mock its relationships
    # too
    rel_rel = Mock()
    rel_rel.type = 'cloudify.gcp.relationships.instance_connected_to_ip'
    rel.target.instance.relationships = [rel_rel]
    self.ctxmock.instance.relationships.append(rel)
    record.create(
        'type',
        'name',
        [],
        'ttl',
    )
    # the addition uses the NAT IP as rrdata and the zone's dnsName
    mock_build().changes().create.assert_called_with(
        body={
            'additions': [{
                'rrdatas': ['intellectual property'],
                'type': 'type',
                'name': 'name.example.com.',
                'ttl': 'ttl',
            }]
        },
        managedZone='target instance',
        project='not really a project',
    )
def test_authenticated_but_not_reviewer(self):
    """An authenticated non-reviewer passes the view-level check but
    fails the object-level check."""
    request = self.request_factory.get('/')
    request.user = user_factory()
    addon = Mock(spec=[])
    addon.type = amo.ADDON_EXTENSION
    addon.has_listed_versions = lambda: True
    # authenticated: view-level permission granted
    assert self.permission.has_permission(request, myview)
    # no reviewer rights: object-level permission denied
    assert not self.permission.has_object_permission(request, myview, addon)
def mock_upload_session():
    """Return a Mock specced as UploadSession for a fresh 4-part upload
    (2 bytes per part, nothing processed yet)."""
    session = Mock(UploadSession)
    for attr, value in (
            ('total_parts', 4),
            ('part_size', 2),
            ('id', 'F971964745A5CD0C001BZ4E58196BFD'),
            ('type', 'upload_session'),
            ('num_parts_processed', 0)):
        setattr(session, attr, value)
    return session
def test_fsync2(self):
    """fsync error paths: a read-only handle raises EPERM, a bad file
    descriptor raises EIO, and a read-only filesystem skips fsync."""
    file = "/dir1/dir2/file"
    testfs = vofs.VOFS(self.testMountPoint, self.testCacheDir, opt)
    testfs.client = Object()
    node = Mock(spec=vos.Node)
    node.isdir = Mock(return_value=False)
    node.props = Object
    node.props.get = Mock(side_effect=SideEffect({
        ('islocked', False): False,
        ('length',): 10,
        ('MD5',): 12354,
    }, name="node.props.get"))
    node.type = "vos:DataNode"
    testfs.client.get_node = Mock(return_value=node)
    # Try flushing on a read-only file.
    with patch('vofs.CadcCache.FileHandle') as mockFileHandle:
        mockFileHandle.return_value = MyFileHandle(
            file, testfs.cache, None)
        fh = testfs.open(file, os.O_RDONLY, None)
        self.assertFalse(HandleWrapper.file_handle(fh).cache_file_handle.
                         fileModified)
        HandleWrapper.file_handle(fh).cache_file_handle.fsync = \
            Mock(wraps=HandleWrapper.file_handle(fh).cache_file_handle.
                 fsync)
        with self.assertRaises(FuseOSError) as e:
            testfs.fsync(file, False, fh)
        # read-only handles must not be fsync'd
        self.assertEqual(e.exception.errno, EPERM)
        self.assertEqual(HandleWrapper.file_handle(fh).cache_file_handle.
                         fsync.call_count, 0)
        self.assertFalse(HandleWrapper.file_handle(fh).cache_file_handle.
                         fileModified)
        testfs.release(file, fh)
    # Try with an invalid file descriptor
    with self.assertRaises(FuseOSError) as e:
        testfs.fsync(file, False, -1)
    self.assertEqual(e.exception.errno, EIO)
    # Try flushing on a read-only file system.
    with patch('vofs.CadcCache.FileHandle') as mockFileHandle:
        myopt = copy.copy(opt)
        testfs = vofs.VOFS(self.testMountPoint, self.testCacheDir, myopt)
        mockFileHandle.return_value = MyFileHandle(
            file, testfs.cache, None)
        # NOTE(review): readonly is flipped after VOFS was constructed;
        # confirm VOFS reads the opt object lazily or this has no effect
        myopt.readonly = True
        testfs.client = Object()
        testfs.client.get_node = Mock(return_value=node)
        fh = testfs.open(file, os.O_RDONLY, None)
        HandleWrapper.file_handle(fh).cache_file_handle.fsync = \
            Mock(wraps=HandleWrapper.file_handle(fh).cache_file_handle.
                 fsync)
        testfs.fsync(file, False, fh)
        # on a read-only filesystem fsync must be a no-op
        self.assertEqual(
            HandleWrapper.file_handle(fh).cache_file_handle.fsync.
            call_count, 0)
        testfs.release(file, fh)
def test_build_rr_observation(self):
    """build_observation for a rapid-response reservation derives the
    site/enclosure/telescope triple from the scheduled resource and
    resolves instrument names through configdb."""
    reservation = Reservation(priority=None, duration=10,
                              possible_windows_dict={})
    reservation.scheduled_start = 0
    reservation.scheduled_resource = '1m0a.doma.bpl'
    proposal = Proposal({
        'id': 'testPro',
        'tag': 'tagPro',
        'tac_priority': 39,
        'pi': 'me'
    })
    target = ICRSTarget({'name': 'test', 'ra': 23.3, 'dec': 22.2})
    request_group = RequestGroup(operator='single', requests=None,
                                 proposal=proposal, expires=None,
                                 rg_id=333333, is_staff=False, name=None,
                                 ipp_value=1.0,
                                 observation_type="RAPID_RESPONSE",
                                 submitter='')
    # one EXPOSE configuration with guiding enabled
    configuration = Mock()
    configuration.guiding_config = {'mode': 'ON', 'optional': True}
    configuration.type = 'EXPOSE'
    configuration.instrument_type = '1M0-FAKE-SCICAM'
    configuration.constraints = {}
    configuration.id = 13
    configuration.target = target
    request = Request(
        configurations=[configuration],
        windows=None,
        request_id=22223,
    )
    reservation.request = request
    reservation.request_group = request_group
    configdb_interface = Mock()
    configdb_interface.get_specific_instrument.return_value = 'xx03'
    configdb_interface.get_autoguider_for_instrument.return_value = 'xx04'
    received = build_observation(reservation, self.start,
                                 configdb_interface)
    # '1m0a.doma.bpl' -> telescope / enclosure / site
    assert_equal(received['request'], 22223)
    assert_equal(received['site'], 'bpl')
    assert_equal(received['enclosure'], 'doma')
    assert_equal(received['telescope'], '1m0a')
    assert_equal(received['configuration_statuses'][0]['configuration'],
                 13)
    assert_equal(received['configuration_statuses'][0]['instrument_name'],
                 'xx03')
    assert_equal(
        received['configuration_statuses'][0]['guide_camera_name'], 'xx04')
def test_anchor(self):
    """set_anchors wires each DrawableNode into its DrawableEdge's
    anchors, and each node's anchor back to the edge."""
    target = Mock()
    target.type = "node"
    source = Mock()
    source.type = "node"
    relation = Mock()
    relation.type = "edge"
    # given two node objects connected by an edge object
    target.name = "target"
    source.name = "source"
    properties = {"source-object": "source", "target-object": "target"}

    def getitem(name):
        return properties[name]

    relation.__getitem__ = Mock(side_effect=getitem)
    relation.name = "relation"
    # added to diagram
    self.diagram.add(relation)
    self.diagram.add(target)
    self.diagram.add(source)
    # when anchors are set
    self.diagram.set_anchors()
    # each DrawableNode object should be referenced by proper DrawableEdge
    # object's anchors
    drawable_relation = self.diagram.edges["relation"]
    drawable_source = self.diagram.nodes["source"]
    drawable_target = self.diagram.nodes["target"]
    # fix: assertEquals is deprecated (removed in Python 3.12);
    # replaced with assertEqual
    self.assertEqual(drawable_source,
                     drawable_relation.source_anchor.slot)
    self.assertEqual(drawable_target,
                     drawable_relation.target_anchor.slot)
    # and each DrawableNode object's anchor should reference the
    # DrawableEdge object
    self.assertEqual(drawable_relation,
                     drawable_source.anchors.pop().connector)
    self.assertEqual(drawable_relation,
                     drawable_target.anchors.pop().connector)
def test_authenticated_but_not_reviewer(self):
    """A logged-in user without reviewer rights is allowed at the view
    level only, never at the object level."""
    get_request = self.request_factory.get('/')
    get_request.user = user_factory()
    extension = Mock(spec=[])
    extension.type = amo.ADDON_EXTENSION
    extension.has_listed_versions = lambda: True
    # being authenticated is enough for the view-level check
    assert self.permission.has_permission(get_request, myview)
    # but not for the object-level check
    assert not self.permission.has_object_permission(
        get_request, myview, extension)
def test_user_cannot_be_anonymous(self):
    """Anonymous users fail both the view- and object-level checks."""
    request = self.request_factory.get('/')
    request.user = AnonymousUser()
    addon = Mock(spec=[])
    addon.type = amo.ADDON_EXTENSION
    addon.has_listed_versions = lambda: True
    assert not self.permission.has_permission(request, myview)
    assert not self.permission.has_object_permission(request, myview, addon)
def r53_mock(self):
    '''
    Mock route53 connection and dns records

    Returns:
        R53 Mock object
    '''
    r53_mock = Mock()
    r53_connect_result = Mock(name='r53_connect')
    r53_mock.return_value = r53_connect_result
    # alias A record for an ELB with a stack-id suffix in its name
    m1 = Mock(alias_dns_name="unittest1")
    m1.name = 'unittest_elb-12345678.dsd.io.'
    m1.type = 'A'
    m1.alias_hosted_zone_id = "ASDAKSLSA"
    m1.alias_evaluate_target_health = False
    # TXT record identifying the active stack
    m2 = Mock(resource_records=['"12345678"'])
    m2.name = 'stack.active.unittest-dev.dsd.io.'
    m2.type = 'TXT'
    m2.alias_hosted_zone_id = "ASDAKSLSA"
    m2.alias_evaluate_target_health = False
    # alias A record without the stack-id suffix
    m3 = Mock(alias_dns_name="unittest1")
    m3.name = 'unittest_elb.dsd.io.'
    m3.type = 'A'
    m3.alias_hosted_zone_id = "ASDAKSLSA"
    m3.alias_evaluate_target_health = False
    # TXT record for the test stack
    m4 = Mock(resource_records=['"12345678"'])
    m4.name = 'stack.test.unittest-dev.dsd.io.'
    m4.type = 'TXT'
    m4.alias_hosted_zone_id = "ASDAKSLSA"
    m4.alias_evaluate_target_health = False
    # TXT record holding the deploy ARN for the test stack
    m5 = Mock(resource_records=['"12345678"'])
    m5.name = 'deployarn.test.unittest-dev.dsd.io.'
    m5.type = 'TXT'
    m5.alias_hosted_zone_id = "ASDAKSLSA"
    m5.alias_evaluate_target_health = False
    response = [m1, m2, m3, m4, m5]
    mock_config = {
        'update_dns_record.return_value': True,
        'get_all_rrsets.return_value': response,
        'delete_dns_record.return_value': True
    }
    r53_connect_result.configure_mock(**mock_config)
    # patch boto's region connect so R53 picks up the mocked connection
    boto.route53.connect_to_region = r53_mock
    r = r53.R53("profile_name")
    return r
def setUp(self):
    """Attach a dns_record_contained_in_zone relationship to the mocked
    ctx so record tests have a zone to resolve against."""
    super(TestGCPRecord, self).setUp()
    zone_rel = Mock()
    zone_rel.type = \
        'cloudify.gcp.relationships.dns_record_contained_in_zone'
    zone_rel.target.instance.runtime_properties = {
        'name': 'target instance',
        'dnsName': 'example.com.',
    }
    self.ctxmock.instance.relationships = [zone_rel]
def test_populate_database_non_existent_model(self):
    """An item with an unknown type must raise
    NonExistentModelException."""
    bogus_item = Mock()
    bogus_item.type = 'random'
    bogus_item.properties = ['']
    with self.assertRaises(NonExistentModelException):
        populate_database([bogus_item])
def test_user_cannot_be_anonymous(self):
    """An anonymous request is denied at both permission levels."""
    anon_request = self.request_factory.get('/')
    anon_request.user = AnonymousUser()
    extension = Mock(spec=[])
    extension.type = amo.ADDON_EXTENSION
    extension.has_listed_versions = lambda: True
    assert not self.permission.has_permission(anon_request, myview)
    assert not self.permission.has_object_permission(
        anon_request, myview, extension)
def new_device(self, *args, **kwargs):
    """ Return a new Device instance suitable for testing.

    Consumes the ``device_class`` (required), ``exists`` and
    ``part_type`` keyword arguments; everything else is forwarded to
    the device constructor.
    """
    device_class = kwargs.pop("device_class")
    # we intentionally don't pass the "exists" kwarg to the constructor
    # because this causes issues with some devices (especially partitions)
    # but we still need it for some LVs like VDO because we can't create
    # those so we need to fake their existence even for the constructor
    if device_class is blivet.devices.LVMLogicalVolumeDevice:
        exists = kwargs.get("exists", False)
    else:
        exists = kwargs.pop("exists", False)
    part_type = kwargs.pop("part_type", parted.PARTITION_NORMAL)
    device = device_class(*args, **kwargs)
    if exists:
        device._current_size = kwargs.get("size")
    if isinstance(device, blivet.devices.PartitionDevice):
        # if exists:
        #     device.parents = device.req_disks
        device.parents = device.req_disks
        parted_partition = Mock()
        if device.disk:
            # derive the partition number from the trailing "pN" suffix
            part_num = device.name[len(device.disk.name):].split("p")[-1]
            parted_partition.number = int(part_num)
        parted_partition.type = part_type
        parted_partition.path = device.path
        parted_partition.get_device_node_name = Mock(
            return_value=device.name)
        if len(device.parents) == 1:
            # fall back to stripping the disk name off the device name
            disk_name = device.parents[0].name
            number = device.name.replace(disk_name, "")
            try:
                parted_partition.number = int(number)
            except ValueError:
                pass
        device._parted_partition = parted_partition
    elif isinstance(device,
                    blivet.devices.LVMVolumeGroupDevice) and exists:
        device._complete = True
    device.exists = exists
    device.format.exists = exists
    if isinstance(device, blivet.devices.PartitionDevice):
        # PartitionDevice.probe sets up data needed for resize operations
        device.probe()
    return device
def setUp(self):
    """Build the shared deck fixtures used by the validator tests."""
    self.deck_validator = DeckValidator()
    self.empty_deck = Deck([])
    self.oversized_deck = Deck([Mock() for _ in range(60)])
    # two copies of the same outfit card
    outfit_card = Mock()
    outfit_card.type = 'Outfit'
    self.multi_outfit_deck = [outfit_card] * 2
    # three copies of the same joker
    joker_card = Mock()
    joker_card.type = 'Joker'
    self.triple_joker_deck = [joker_card] * 3
    # five copies of one dude (over the four-copy limit)
    dude_card = MagicMock()
    dude_card.type = 'Dude'
    dude_card.name = 'Nate Hunter'
    dude_card.__str__.return_value = dude_card.name
    self.over_four_copies_deck = [dude_card] * 5
    self.really_erroneous_deck = (self.multi_outfit_deck +
                                  self.triple_joker_deck +
                                  self.over_four_copies_deck)
def test_no_event_source(self):
    """An event that carries no 'source' key makes process_event raise
    ValueError."""
    terminated_event = {'detail': {'state': 'TERMINATED'}}
    processor = Mock()
    processor.source = random_string()
    processor.type = None
    manager = EventProcessorManager(table_name=self.table_name,
                                    index_name=self.index_name,
                                    processors=[processor])
    self.assertRaises(ValueError, manager.process_event, terminated_event)
def test_populate_database_add_project(self):
    """A 'project' item ends up persisted as a project row."""
    # given
    project_item = Mock()
    project_item.type = 'project'
    project_item.properties = ['title', 'filename', 'techno', 'url', 1]
    # when
    populate_database([project_item])
    # then
    self.assert_project_with_title_exists_in_database(
        project_item.properties[0])
def test_populate_database_add_post(self):
    """A 'post' item ends up persisted as a post row."""
    # given
    post_item = Mock()
    post_item.type = 'post'
    post_item.properties = ['title', 'filename', datetime.now(),
                            'category', 'author']
    # when
    populate_database([post_item])
    # then
    self.assert_post_with_title_exists_in_database(
        post_item.properties[0])
def test_get_df_handles_dttm_col(self):
    """get_df converts the DTTM_ALIAS column according to the column's
    python_date_format and applies the datasource's hour offset."""
    form_data = {'dummy': 123}
    query_obj = {'granularity': 'day'}
    results = Mock()
    results.query = Mock()
    results.status = Mock()
    results.error_message = Mock()
    datasource = Mock()
    datasource.type = 'table'
    datasource.query = Mock(return_value=results)
    # fix: the original created mock_dttm_col / get_col twice; the
    # redundant second pair has been removed
    mock_dttm_col = Mock()
    datasource.get_col = Mock(return_value=mock_dttm_col)
    test_viz = viz.BaseViz(datasource, form_data)
    test_viz.df_metrics_to_num = Mock()
    test_viz.get_fillna_for_columns = Mock(return_value=0)
    results.df = pd.DataFrame(data={DTTM_ALIAS: ['1960-01-01 05:00:00']})
    datasource.offset = 0
    # epoch_ms format string
    mock_dttm_col.python_date_format = 'epoch_ms'
    result = test_viz.get_df(query_obj)
    # fix: removed leftover debug output (print / import logging /
    # logging.info)
    pd.testing.assert_series_equal(
        result[DTTM_ALIAS],
        pd.Series([datetime(1960, 1, 1, 5, 0)], name=DTTM_ALIAS),
    )
    # no explicit format: parsed as-is
    mock_dttm_col.python_date_format = None
    result = test_viz.get_df(query_obj)
    pd.testing.assert_series_equal(
        result[DTTM_ALIAS],
        pd.Series([datetime(1960, 1, 1, 5, 0)], name=DTTM_ALIAS),
    )
    # a non-zero datasource offset shifts the timestamps by whole hours
    datasource.offset = 1
    result = test_viz.get_df(query_obj)
    pd.testing.assert_series_equal(
        result[DTTM_ALIAS],
        pd.Series([datetime(1960, 1, 1, 6, 0)], name=DTTM_ALIAS),
    )
    datasource.offset = 0
    results.df = pd.DataFrame(data={DTTM_ALIAS: ['1960-01-01']})
    # explicit strftime-style format string
    mock_dttm_col.python_date_format = '%Y-%m-%d'
    result = test_viz.get_df(query_obj)
    pd.testing.assert_series_equal(
        result[DTTM_ALIAS],
        pd.Series([datetime(1960, 1, 1, 0, 0)], name=DTTM_ALIAS),
    )
def test_populate_database_add_status(self):
    """A 'status' item ends up persisted as a status row."""
    # given
    status_item = Mock()
    status_item.type = 'status'
    status_item.properties = ['status_txt']
    # when
    populate_database([status_item])
    # then
    self.assert_status_with_name_exists_in_database(
        status_item.properties[0])
def _get_filter(self, data, resources):
    """Build a CostFilter wired to a mocked manager whose usage query
    returns canned costs for *resources*."""
    manager = Mock()
    manager.get_session.return_value.get_subscription_id.return_value = \
        self.session.get_subscription_id()
    manager.get_client.return_value.query.usage_by_scope.return_value = \
        self._get_costs(resources)
    # pick the manager's resource type from the first resource's id
    if 'Microsoft.Compute/virtualMachines' in resources[0]['id']:
        manager.resource_type.resource_type = 'Microsoft.Compute/virtualMachines'
    else:
        manager.type = 'resourcegroup'
        manager.resource_type.resource_type = 'Microsoft.Resources/subscriptions/resourceGroups'
    return CostFilter(data=data, manager=manager)
def test_populate_database_add_user(self):
    """A 'user' item ends up persisted as a user row."""
    # given
    user_item = Mock()
    user_item.type = 'user'
    user_item.properties = ['bob', '*****@*****.**', 'bobpw']
    # when
    populate_database([user_item])
    # then
    self.assert_user_with_username_exists_in_database(
        user_item.properties[0])
def test_alert_should_be_ignored_when_subscription_says_so(self):
    """A resolved alert is ignored when the daily subscription opts
    out of resolved alerts."""
    resolved_at = datetime(2010, 1, 1, 23, 59, 0)
    checked_at = datetime(2010, 1, 2, 0, 0, 0)
    queued_alert = Mock()
    queued_alert.alert.history.end_time = resolved_at
    subscription = Mock()
    subscription.ignored_resolved_alerts = True
    subscription.type = AlertSubscription.DAILY
    self.assertTrue(
        alert_should_be_ignored(queued_alert, subscription, checked_at))
def test_populate_database_add_technology(self):
    """A 'technology' item ends up persisted as a technology row."""
    # given
    techno_item = Mock()
    techno_item.type = 'technology'
    techno_item.properties = ['techno_txt']
    # when
    populate_database([techno_item])
    # then
    self.assert_technology_with_name_exists_in_database(
        techno_item.properties[0])
def test_tree_type_checker():
    """Nose-style tree_type_checker coverage: single type, multiple
    args, and the all-matching NeuriteType.all / NEURITES cases."""
    #check that when NeuriteType.all, we accept all trees, w/o checking type
    tree_filter = tree_type_checker(NeuriteType.all)
    nt.ok_(tree_filter('fake_tree'))
    mock_tree = Mock()
    mock_tree.type = NeuriteType.axon
    #single arg
    tree_filter = tree_type_checker(NeuriteType.axon)
    nt.ok_(tree_filter(mock_tree))
    mock_tree.type = NeuriteType.basal_dendrite
    nt.ok_(not tree_filter(mock_tree))
    #multiple args
    tree_filter = tree_type_checker(NeuriteType.axon,
                                    NeuriteType.basal_dendrite)
    nt.ok_(tree_filter(mock_tree))
    tree_filter = tree_type_checker(*NEURITES)
    nt.ok_(tree_filter('fake_tree'))
def test_create_with_instance(self, mock_build, *args):
    """A dns_record connected to an instance resolves the record data
    to the instance's external NAT IP (compact-literal variant)."""
    mock_build().changes().create().execute.side_effect = [
        {'status': 'done'},
    ]
    rel = Mock()
    rel.type = ('cloudify.gcp.relationships.'
                'dns_record_connected_to_instance')
    rel.target.instance.runtime_properties = {
        'kind': 'compute#instance',
        'networkInterfaces': [{'accessConfigs': [{
            'natIP': 'intellectual property',
        }]}],
    }
    # rel_target is the Instance instance context. The instance itself must
    # be connected to an external IP, so we need to mock its relationships
    # too
    rel_rel = Mock()
    rel_rel.type = 'cloudify.gcp.relationships.instance_connected_to_ip'
    rel.target.instance.relationships = [rel_rel]
    self.ctxmock.instance.relationships.append(rel)
    record.create(
        'type',
        'name',
        [],
        'ttl',
    )
    # the addition uses the NAT IP as rrdata and the zone's dnsName
    mock_build().changes().create.assert_called_with(
        body={
            'additions': [{
                'rrdatas': ['intellectual property'],
                'type': 'type',
                'name': 'name.example.com.',
                'ttl': 'ttl',
            }]},
        managedZone='target instance',
        project='not really a project',
    )
def test_contactviewset_email_sent(self):
    """
    Validate email is sent.
    """
    view = ContactFormViewSet()
    view.request = Mock()
    message = Mock()
    message.type = 'MessageType'
    message.content = 'Message content'
    view.post_save(message, created=True)
    # exactly one message, with the type in the subject and the content
    # as the body
    self.assertEqual(len(mail.outbox), 1)
    self.assertIn('MessageType', mail.outbox[0].subject)
    self.assertEqual('Message content', mail.outbox[0].body)
def test_admin(self):
    """A user with the '*:*' admin permission passes both the view- and
    object-level checks for every HTTP method."""
    admin = user_factory()
    self.grant_permission(admin, '*:*')
    for method in self.safe_methods + self.unsafe_methods:
        request = getattr(self.request_factory, method)('/')
        request.user = admin
        addon = Mock(spec=[])
        addon.type = amo.ADDON_EXTENSION
        addon.has_listed_versions = lambda: True
        assert self.permission.has_permission(request, myview)
        assert self.permission.has_object_permission(
            request, myview, addon)
def test_exercise_sell(self):
    """Exercising a sell order creates a completed confirmation whose
    proceeds are bond_amount * 1.56."""
    sell_order = Mock()
    sell_order.type = 'sell'
    sell_order.bond_amount = 156
    self.mock_create.return_value = ['123']
    confirmation = self.trade_service.exercise(sell_order)
    self.mock_create.assert_called_once_with(sell_order)
    self.mock_ionobj.assert_called_once_with('Confirmation',
                                             status='complete',
                                             tracking_number='123',
                                             proceeds=156 * 1.56)
    self.assertTrue(confirmation is self.mock_ionobj.return_value)
def test_with_app(self):
    """Breadcrumbs for a webapp product show 'My Submissions' followed
    by the unlinked app name."""
    app = Mock()
    app.name = 'Steamcube'
    app.id = 9999
    app.app_slug = 'scube'
    app.type = amo.ADDON_WEBAPP
    rendered = render("""{{ hub_breadcrumbs(product) }}""",
                      {'request': self.request, 'product': app})
    crumbs = pq(rendered)('li')
    expected = [
        ('My Submissions', reverse('mkt.developers.apps')),
        ('Steamcube', None),
    ]
    amo.tests.check_links(expected, crumbs, verify=False)
def test_get_df_handles_dttm_col(self):
    """Mock-DataFrame variant: inspects the DTTM_ALIAS column
    assignments get_df makes for epoch_ms and utc formats, including
    the 1-hour datasource offset."""
    datasource = Mock()
    datasource.type = 'table'
    datasource.offset = 1
    mock_dttm_col = Mock()
    mock_dttm_col.python_date_format = 'epoch_ms'
    datasource.get_col = Mock(return_value=mock_dttm_col)
    form_data = {'dummy': 123}
    query_obj = {'granularity': 'day'}
    results = Mock()
    results.query = Mock()
    results.status = Mock()
    results.error_message = Mock()
    # a fully mocked DataFrame so each __setitem__ call can be inspected
    df = Mock()
    df.columns = [DTTM_ALIAS]
    f_datetime = datetime(1960, 1, 1, 5, 0)
    df.__getitem__ = Mock(return_value=pd.Series([f_datetime]))
    df.__setitem__ = Mock()
    df.replace = Mock()
    df.fillna = Mock()
    results.df = df
    results.df.empty = False
    datasource.query = Mock(return_value=results)
    test_viz = viz.BaseViz(datasource, form_data)
    test_viz.df_metrics_to_num = Mock()
    test_viz.get_fillna_for_columns = Mock(return_value=0)
    test_viz.get_df(query_obj)
    # first assignment: parsed timestamps, unshifted
    mock_call = df.__setitem__.mock_calls[0]
    self.assertEqual(mock_call[1][0], DTTM_ALIAS)
    self.assertFalse(mock_call[1][1].empty)
    self.assertEqual(mock_call[1][1][0], f_datetime)
    # second assignment: 1-hour offset applied
    mock_call = df.__setitem__.mock_calls[1]
    self.assertEqual(mock_call[1][0], DTTM_ALIAS)
    self.assertEqual(mock_call[1][1][0].hour, 6)
    self.assertEqual(mock_call[1][1].dtype, 'datetime64[ns]')
    mock_dttm_col.python_date_format = 'utc'
    test_viz.get_df(query_obj)
    # utc format: timestamps land an hour later before the offset
    mock_call = df.__setitem__.mock_calls[2]
    self.assertEqual(mock_call[1][0], DTTM_ALIAS)
    self.assertFalse(mock_call[1][1].empty)
    self.assertEqual(mock_call[1][1][0].hour, 7)
    mock_call = df.__setitem__.mock_calls[3]
    self.assertEqual(mock_call[1][0], DTTM_ALIAS)
    self.assertEqual(mock_call[1][1][0].hour, 6)
    self.assertEqual(mock_call[1][1].dtype, 'datetime64[ns]')
    mock_call = df.__setitem__.mock_calls[4]
    self.assertEqual(mock_call[1][0], DTTM_ALIAS)
    self.assertEqual(mock_call[1][1][0].hour, 7)
    self.assertEqual(mock_call[1][1].dtype, 'datetime64[ns]')
def test_with_app(self):
    """Webapp breadcrumbs: linked 'My Submissions' crumb followed by the
    add-on name rendered without a link."""
    webapp = Mock()
    webapp.name = 'Firebug'
    webapp.id = 1843
    webapp.app_slug = 'fbug'
    webapp.type = amo.ADDON_WEBAPP
    rendered = render("""{{ hub_breadcrumbs(addon) }}""",
                      {'request': self.request, 'addon': webapp})
    crumbs = pq(rendered)('li')
    first = crumbs.eq(0)
    second = crumbs.eq(1)
    eq_(first.text(), 'My Submissions')
    eq_(first.children('a').attr('href'),
        reverse('mkt.developers.apps'))
    eq_(second.text(), 'Firebug')
    # The final crumb is plain text — no anchor children.
    eq_(second.children('a'), [])
def test_theme_reviewer(self):
    """Addons:ThemeReview grants object-level access to static themes but
    not to regular extensions (view-level access still passes because the
    requester is authenticated)."""
    reviewer = user_factory()
    self.grant_permission(reviewer, 'Addons:ThemeReview')
    target = Mock(spec=[])
    target.type = amo.ADDON_STATICTHEME
    target.has_listed_versions = lambda: True
    all_methods = self.safe_methods + self.unsafe_methods
    # Static theme: both checks succeed for every HTTP method.
    for verb in all_methods:
        req = getattr(self.request_factory, verb)('/')
        req.user = reviewer
        assert self.permission.has_permission(req, myview)
        assert self.permission.has_object_permission(req, myview, target)
    # Extension: the view check still passes (authenticated user), but
    # the object check is denied.
    target.type = amo.ADDON_EXTENSION
    for verb in all_methods:
        req = getattr(self.request_factory, verb)('/')
        req.user = reviewer
        assert self.permission.has_permission(req, myview)
        assert not self.permission.has_object_permission(req, myview, target)
def test_get_df_returns_empty_df(self):
    """get_df() hands back an empty DataFrame when the datasource query
    produces no rows and no error."""
    source = Mock()
    source.type = 'table'
    query_result = Mock()
    query_result.query = Mock()
    query_result.status = Mock()
    query_result.error_message = None
    query_result.df = pd.DataFrame()
    source.query = Mock(return_value=query_result)
    base_viz = viz.BaseViz(source, {'dummy': 123})
    df = base_viz.get_df({'granularity': 'day'})
    self.assertEqual(type(df), pd.DataFrame)
    self.assertTrue(df.empty)
def get_datasource_mock(self):
    """Build a table-type datasource Mock for viz tests.

    Returns:
        Mock: a datasource whose ``query()`` yields a results object with
        no error and an empty DataFrame, whose ``get_col()`` returns a
        fresh Mock column, and whose database engine spec leaves
        expression labels unchanged (identity ``mutate_expression_label``).
    """
    results = Mock()
    results.query = Mock()
    results.status = Mock()
    results.error_message = None
    results.df = pd.DataFrame()

    datasource = Mock()
    datasource.type = 'table'
    # The original assigned datasource.query twice with identical values;
    # a single assignment is sufficient.
    datasource.query = Mock(return_value=results)
    mock_dttm_col = Mock()
    datasource.get_col = Mock(return_value=mock_dttm_col)
    datasource.database = Mock()
    datasource.database.db_engine_spec = Mock()
    datasource.database.db_engine_spec.mutate_expression_label = lambda x: x
    return datasource
def test_process_metrics(self):
    """BaseViz exposes metric_labels / all_metrics in the exact order the
    form_data lists them: regular metrics first, percent metrics last."""
    form_data = {
        'url_params': {},
        'row_limit': 500,
        'metric': 'sum__SP_POP_TOTL',
        'entity': 'country_code',
        'secondary_metric': 'sum__SP_POP_TOTL',
        'granularity_sqla': 'year',
        'page_length': 0,
        'all_columns': [],
        'viz_type': 'table',
        'since': '2014-01-01',
        'until': '2014-01-02',
        'metrics': [
            'sum__SP_POP_TOTL',
            'SUM(SE_PRM_NENR_MA)',
            'SUM(SP_URB_TOTL)',
        ],
        'country_fieldtype': 'cca3',
        'percent_metrics': [
            'count',
        ],
        'slice_id': 74,
        'time_grain_sqla': None,
        'order_by_cols': [],
        'groupby': [
            'country_name',
        ],
        'compare_lag': '10',
        'limit': '25',
        'datasource': '2__table',
        'table_timestamp_format': '%Y-%m-%d %H:%M:%S',
        'markup_type': 'markdown',
        'where': '',
        'compare_suffix': 'o10Y',
    }
    mock_datasource = Mock()
    mock_datasource.type = 'table'
    base_viz = viz.BaseViz(mock_datasource, form_data)
    expected_labels = [
        u'sum__SP_POP_TOTL',
        u'SUM(SE_PRM_NENR_MA)',
        u'SUM(SP_URB_TOTL)',
        u'count',
    ]
    self.assertEqual(base_viz.metric_labels, expected_labels)
    self.assertEqual(base_viz.all_metrics, expected_labels)