def test_DefaultBounds(self):
    """Default bounds for a (2, 2) configuration match the expected limits."""
    expected = ([0.0, 0.0, 0.0, 0.0, -20.0, -20.0],
                [20.0, 20.0, 20.0, 20.0, 20.0, 20.0])
    actual = default_bounds(2, 2)
    assert_tuple_equal(
        expected, actual,
        "Expected {}\nbut actual {}".format(expected, actual))
def test_wqstd(self):
    """The water-quality standards table is a DataFrame of the expected layout."""
    nt.assert_true(isinstance(self.db.wqstd, pandas.DataFrame))
    nt.assert_tuple_equal(self.db.wqstd.shape, (48, 4))
    nt.assert_list_equal(
        ['parameter', 'units', 'lower_limit', 'upper_limit'],
        self.db.wqstd.columns.tolist(),
    )
def test_get_device():
    """A known IP resolves to its device; an unknown IP yields (None, None)."""
    active_devices = {
        u'addr1': (1406362740.570188, u'192.168.1.2', u'tom'),
        u'addr2': (1006362740.0, u'192.168.1.82', u'william'),
    }
    n.assert_tuple_equal(
        u.get_device(active_devices, u'192.168.1.82'),
        ('addr2', u'william'))
    n.assert_tuple_equal(
        u.get_device(active_devices, u'192.168.4.10'),
        (None, None))
def _DISABLED_test_render_simple_contour():
    """Umaco example of a simple arrow-shaped contour (disabled)."""
    gerber = _test_render('resources/example_simple_contour.gbr',
                          'golden/example_simple_contour.png')
    # Check the resulting dimensions.
    assert_tuple_equal(((2.0, 11.0), (1.0, 9.0)), gerber.bounding_box)
def test_round_off_error():
    """Node coordinate arrays keep their full size despite float round-off."""
    (x, y) = sgrid.node_coords((135, 127),
                               (5.4563957090392, 5.4563957090392),
                               (0., 0.))
    expected_size = (135 * 127, )
    assert_tuple_equal(x.shape, expected_size)
    assert_tuple_equal(y.shape, expected_size)
def test_writing_reading_test_data():
    """Round-trip test data through the write/iterate helpers and check row 2."""
    column_names = ('text', 'input_text_languages_str', 'input_int1_int',
                    'input_arg_bool', 'expected1', 'expected2')
    texts = (None, 'text2', 'text3', '###text4')
    values = ((('l1', '11', 'False', 'e11', 'e12'), ),
              (('l2', '22', 'True', 'e21', 'e22'), ),
              (('l3', '33', 'False', 'e31', 'e32'),
               ('l3', '33', 'False', 'e31', 'e32'), 'string'),
              ())
    file_name = None
    try:
        file_name = lexnlp_tests.write_test_data_text_and_tuple(
            texts, values, column_names)
        actual = [
            (i, text, input_args, expected)
            for i, text, input_args, expected
            in lexnlp_tests.iter_test_data_text_and_tuple(call_stack_offset=1)
        ]
        a2 = actual[0]
        e2 = (2, 'text2',
              {'text_languages': 'l2', 'arg': True, 'int1': 22},
              [('e21', 'e22')])
        print('Actual: {0}'.format(a2))
        print('Expected: {0}'.format(e2))
        assert_equals(a2[0], e2[0])
        assert_equals(a2[1], e2[1])
        assert_dict_equal(a2[2], e2[2])
        assert_equals(len(a2[3]), len(e2[3]))
        assert_tuple_equal(a2[3][0], e2[3][0])
    finally:
        # Remove the temp file even if the assertions above fail.
        if file_name:
            os.remove(file_name)
def test_fake_bathy(self):
    """With no bathymetry source, interpolation yields a zero-filled masked array."""
    elev = misc.interpolate_bathymetry(None, self.grid.x_rho, self.grid.y_rho)
    nptest.assert_array_equal(
        elev,
        np.ma.MaskedArray(data=np.zeros(self.grid.x_rho.shape),
                          mask=self.grid.x_rho.mask),
    )
    nt.assert_tuple_equal(elev.shape, self.grid.x_rho.shape)
def test_nodes():
    """Test getting nodes at links from a voronoi."""
    converter = VoronoiConverter(Voronoi(POINTS))
    nodes = converter.get_nodes()
    assert_tuple_equal(nodes.shape, (10, 2))
    assert_is_instance(nodes[0, 0], float)
def test_getWQData(self):
    """getWQData returns DataFrames: populated for ED-1, empty for LV-1."""
    ed1 = self.db.getWQData('ED-1')
    nt.assert_true(isinstance(ed1, pandas.DataFrame))
    nt.assert_tuple_equal(ed1.shape, (12, 11))
    lv1 = self.db.getWQData('LV-1')
    nt.assert_true(isinstance(lv1, pandas.DataFrame))
    # Same columns, zero rows.
    nt.assert_tuple_equal(lv1.shape, (0, 11))
def test_read_config_params():
    """Config params are read back from a copy of the reference config file."""
    tmp = tempfile.NamedTemporaryFile()
    copyfile(CONFIG, tmp.name)
    observed = p.read_config_params(tmp.name)
    expected = ('the-ca-bundle-file.crt',
                'https://delaware.dada.pink',
                'tlevine',
                '065db5d3-924c-4129-9c93-2360538a4ce5')
    n.assert_tuple_equal(observed, expected)
def check_convert_dates(raw, expectation):
    """Check convert_dates against an expectation.

    ``expectation`` is either ``None`` (conversion should fail) or a tuple of
    ``(month, day)`` pairs the converted dates should match.
    """
    result = convert_dates(raw, lambda x: convert_date(x, year=2014))
    # Compare against the None singleton with `is`, not `==` (PEP 8).
    if expectation is None:
        n.assert_is_none(result)
    else:
        n.assert_is_not_none(result)
        observed = tuple((x.month, x.day) for x in result)
        n.assert_tuple_equal(observed, expectation)
def _test(*args, **kwargs): expect = ('foo', 'bar') assert_tuple_equal(args, expect) expect = dict([ ('baz', 'qux'), ('ledger_hash', 'foo_ledger'), ]) assert_dict_equal(kwargs, expect)
def test_2d_changing_shape():
    """Writing a field of a new shape rolls the database over to a new file."""
    # Create a field, add data, and write it to a NetCDF file database.
    nc_file = os.path.join(_TMP_DIR, 'Temperature_time_series.nc')
    data = np.arange(6.)
    field = RasterField((3, 2), (1., 1.), (0., 0.))
    field.add_field('Temperature', data, centering='point')
    db = Database()
    db.open(nc_file, 'Temperature')
    db.write(field)
    assert_true(os.path.isfile(nc_file))

    # Create a new field and write the data to the database. Since
    # the size of the field has changed, the data will be written
    # to a new file, Elevation_time_series_0000.nc.
    field = RasterField((3, 3), (1., 1.), (0., 0.))
    data = np.arange(9.)
    field.add_field('Temperature', data, centering='point')
    db.write(field)
    assert_true(
        os.path.isfile(
            os.path.join(_TMP_DIR, 'Temperature_time_series_0000.nc')))
    db.close()

    try:
        nc_file = os.path.join(_TMP_DIR, 'Temperature_time_series_0000.nc')
        root = open_nc_file(nc_file)
    except Exception:
        raise AssertionError('%s: Could not open' % nc_file)
    else:
        assert_items_equal(['x', 'y', 'time'], root.dimensions.keys())
        assert_items_equal(['Temperature', 'x', 'y', 'time', 'mesh'],
                           root.variables)
        assert_equal(3, len(root.dimensions['x']))
        assert_equal(3, len(root.dimensions['y']))
        assert_equal(1, len(root.dimensions['time']))
        assert_tuple_equal((1, 3, 3), root.variables['Temperature'].shape)
        assert_array_equal(np.arange(9.).reshape((3, 3)),
                           root.variables['Temperature'][0])
        assert_array_equal([0., 1., 2.], root.variables['x'])
        assert_array_equal([0., 1., 2.], root.variables['y'])
        assert_equal('Temperature', root.variables['Temperature'].long_name)
        assert_equal('-', root.variables['Temperature'].units)
        root.close()
def test_pack():
    """Round-trip many value types through pack/unpack."""
    # Numeric scalars of assorted widths and signedness.
    for x in (32, -3.7e-2, np.float64(3e31), -np.inf, np.int8(-3),
              np.uint8(-1), np.int16(-33), np.uint16(-33), np.int32(-3),
              np.uint32(-1), np.int64(373), np.uint64(-3)):
        assert_equal(x, unpack(pack(x)), "Scalars don't match!")
    x = np.nan
    assert_true(np.isnan(unpack(pack(x))), "nan scalar did not match!")

    # Arrays of various dtypes and ranks.
    x = np.random.randn(8, 10)
    assert_array_equal(x, unpack(pack(x)), "Arrays do not match!")
    x = np.random.randn(10)
    assert_array_equal(x, unpack(pack(x)), "Arrays do not match!")
    x = np.float32(np.random.randn(3, 4, 5))
    assert_array_equal(x, unpack(pack(x)), "Arrays do not match!")
    x = np.int16(np.random.randn(1, 2, 3))
    assert_array_equal(x, unpack(pack(x)), "Arrays do not match!")

    # None, containers, and assorted object types.
    x = None
    assert_true(x is None, "None did not match")
    x = [None]
    assert_list_equal(x, unpack(pack(x)))
    x = {'name': 'Anonymous', 'age': 15, 99: datetime.now(),
         'range': [110, 190], (11, 12): None}
    assert_dict_equal(x, unpack(pack(x)), "Dict do not match!")
    x = uuid.uuid4()
    assert_equal(x, unpack(pack(x)), 'UUID did not match')
    x = Decimal("-112122121.000003000")
    assert_equal(x, unpack(pack(x)), "Decimal did not pack/unpack correctly")
    x = [1, datetime.now(), {1: "one", "two": 2}, (1, 2)]
    assert_list_equal(x, unpack(pack(x)),
                      "List did not pack/unpack correctly")
    x = (1, datetime.now(), {1: "one", "two": 2}, (uuid.uuid4(), 2))
    assert_tuple_equal(x, unpack(pack(x)),
                       "Tuple did not pack/unpack correctly")
    x = (1, {datetime.now().date(): "today", "now": datetime.now().date()},
         {"yes!": [1, 2, np.array((3, 4))]})
    y = unpack(pack(x))
    assert_dict_equal(x[1], y[1])
    assert_array_equal(x[2]['yes!'][2], y[2]['yes!'][2])
    x = {'elephant'}
    assert_set_equal(x, unpack(pack(x)), "Set did not pack/unpack correctly")
    # Iterators come back materialized as a tuple.
    x = tuple(range(10))
    assert_tuple_equal(x, unpack(pack(range(10))),
                       "Iterator did not pack/unpack correctly")
    x = Decimal('1.24')
    assert_true(x == unpack(pack(x)),
                "Decimal object did not pack/unpack correctly")
    x = datetime.now()
    # NOTE(review): this message was split by line-wrapping in the original
    # source; rejoined as a single literal — confirm against upstream.
    assert_true(x == unpack(pack(x)),
                "Datetime object did not pack/unpack correctly")
def test_patches_at_link():
    """Test getting link patches."""
    converter = VoronoiConverter(Voronoi(POINTS))
    patches_at_link = converter.get_patches_at_link()
    assert_tuple_equal(patches_at_link.shape, (11, 2))
    # Every link must border at least one real patch (-1 marks "no patch").
    for pair in patches_at_link:
        assert_true(pair[0] != -1 or pair[1] != -1)
def test_grid_var_names():
    """The BMI model exposes the temperature field as both input and output."""
    model = BmiHeat()
    model.initialize()
    expected = ('plate_surface__temperature',)
    assert_tuple_equal(model.get_input_var_names(), expected)
    assert_tuple_equal(model.get_output_var_names(), expected)
def test_links_at_patch():
    """Test getting links that bound patches from a voronoi."""
    converter = VoronoiConverter(Voronoi(POINTS))
    links_at_patch = converter.get_links_at_patch()
    assert_tuple_equal(links_at_patch.shape, (2, 6))
    # All link ids are valid indices into the 11 links.
    assert_true(np.all(links_at_patch >= 0))
    assert_true(np.all(links_at_patch < 11))
def test_grid_var_names():
    """Input and output variable names both contain only the temperature field."""
    model = BmiHeat()
    model.initialize()
    names = model.get_input_var_names()
    assert_tuple_equal(names, ('plate_surface__temperature', ))
    names = model.get_output_var_names()
    assert_tuple_equal(names, ('plate_surface__temperature', ))
def test_guess_dialect():
    """Dialect guessing falls back to 'excel' on empty input and sniffs ';'."""
    empty = StringIO('')
    n.assert_equal(dsv.guess_dialect(empty), 'excel')
    interesting = StringIO('a;b;c\r\n3;";";8\r\n')
    dialect = dsv.guess_dialect(interesting)
    attrs = ('delimiter', 'doublequote', 'escapechar',
             'lineterminator', 'quotechar', 'quoting', 'skipinitialspace')
    observed = tuple(getattr(dialect, attr) for attr in attrs)
    expected = (';', False, None, '\r\n', '"', 0, False)
    n.assert_tuple_equal(observed, expected)
def test_repr_contains_db_info_tcp(self):
    """repr() of a TCP pool embeds the pool, connection, and host info."""
    pool = ConnectionPool(host='localhost', port=8888)
    # AttributeError here means repr() didn't match the expected pattern.
    groups = re.match('(.*)<(.*)<(.*)>>', repr(pool)).groups()
    assert_tuple_equal(
        groups,
        ('ConnectionPool', 'Connection', 'host=localhost,port=8888'),
    )
def test_my_stack():
    """Testing my_stack function.

    my_stack should stack seven (31, 31) matrices into a (7, 31, 31) array
    whose slices equal the inputs.
    """
    # `range` instead of Python-2-only `xrange`: behavior is identical when
    # only iterated, and it works on both Python 2 and 3.
    mats = [np.random.rand(31, 31) for _ in range(7)]
    stacked = my_mfd.my_stack(mats)
    assert_is_instance(stacked, np.ndarray)
    assert_tuple_equal(stacked.shape, (7, 31, 31))
    for idx in range(7):
        assert_array_equal(stacked[idx], mats[idx])
def test_ridges_at_region():
    """Test getting ridges that bound regions."""
    voronoi = Voronoi(POINTS)
    converter = VoronoiConverter(voronoi)
    ridges_at_region = converter.get_ridges_at_region()
    assert_tuple_equal(ridges_at_region.shape, (len(voronoi.regions), 6))
    assert_is_instance(ridges_at_region[0, 0], np.int_)
    # The first region has no real ridges: all slots are the -1 sentinel.
    assert_array_equal(ridges_at_region[0], [-1] * 6)
def test_1_component_dict_interface():
    """A one-entry Palette behaves like the equivalent dict."""
    palette = Palette(sample=Sample1)
    assert_dict_equal(dict(sample=Sample1), palette)
    assert_equal(len(palette), 1)
    assert_equal(palette.keys(), ['sample'])
    assert_equal(palette.values(), [Sample1])
    items = palette.items()
    assert_tuple_equal(('sample', Sample1), items[0])
def test_corner_at_patch():
    """Test getting corners for each patch."""
    voronoi = Voronoi(POINTS)
    converter = VoronoiConverter(voronoi)
    corner_at_patch = converter.get_corner_at_patch()
    assert_tuple_equal(corner_at_patch.shape, (2, ))
    # Corner ids must index into the voronoi input points.
    assert_true(np.all(corner_at_patch >= 0))
    assert_true(np.all(corner_at_patch < len(voronoi.points)))
def test_return_correct_value(self):
    """find_index_by_point maps lat/lon corners and an interior point to indices."""
    cases = [
        ((0, 0), (5, 0)),
        ((9, 0), (0, 0)),
        ((9, 9), (0, 5)),
        ((0, 9), (5, 5)),
        ((2, 7), (3.99, 3.99)),
    ]
    for expected, point in cases:
        ntools.assert_tuple_equal(
            expected,
            gnc.find_index_by_point(self.lat, self.lon, point))
def test_2d_constant_shape():
    """Repeated writes of a constant-shape field append along the time axis."""
    # Create a field and add some data that we will write to a NetCDF
    # file database.
    nc_file = os.path.join(_TMP_DIR, '2d_elevation_time_series.nc')
    data = np.arange(6.)
    field = RasterField((2, 3), (1., 1.), (0., 0.), indexing='ij')
    field.add_field('Elevation', data, centering='point')

    # Create a database of 'Elevation' values; data are written to nc_file.
    db = Database()
    db.open(nc_file, 'Elevation')
    db.write(field)
    assert_true(os.path.isfile(nc_file))

    # Append a second time slice (data mutated in place) to the same file.
    data *= 2.
    db.write(field)
    db.close()

    try:
        root = open_nc_file(nc_file)
    except Exception:
        raise AssertionError('%s: Could not open' % nc_file)
    else:
        assert_items_equal(['x', 'y', 'time'], root.dimensions.keys())
        assert_items_equal(['Elevation', 'x', 'y', 'time', 'mesh'],
                           root.variables)
        assert_equal(3, len(root.dimensions['x']))
        assert_equal(2, len(root.dimensions['y']))
        assert_equal(2, len(root.dimensions['time']))
        assert_tuple_equal((2, 2, 3), root.variables['Elevation'].shape)
        assert_array_equal(np.arange(6.).reshape(2, 3),
                           root.variables['Elevation'][0])
        assert_array_equal(np.arange(6.).reshape((2, 3)) * 2.,
                           root.variables['Elevation'][1])
        assert_array_equal([0., 1.], root.variables['y'])
        assert_array_equal([0., 1., 2.], root.variables['x'])
        assert_equal('Elevation', root.variables['Elevation'].long_name)
        assert_equal('-', root.variables['Elevation'].units)
        root.close()
def test_retrieve_3d():
    """Retrieve a multi-year 3-D slab and verify its shape and content."""
    start = dt.datetime(2005, 2, 1)   # Day number 1858 (0 based), day 31 in file
    end = dt.datetime(2008, 6, 29)    # Day number 3103, day 180 in file
    time_period = pd.date_range(start, end)
    extent = [slice(15, 25), slice(25, 45)]
    data = dataset.retrieve(time_period, extent)
    assert_tuple_equal((1245, 10, 20), data.shape)
    # The (0, 0) cell carries the day number; an interior cell is all zeros.
    assert_array_equal(data[:, 0, 0], np.arange(1858, 3103))
    assert_array_equal(data[:, 5, 5], np.zeros(1245))
def test_retrieve_3d_short():
    """Retrieve a short 3-D slab and verify its shape and content."""
    start = dt.datetime(2005, 2, 1)   # Day number 1858 (0 based), day 31 in file
    end = dt.datetime(2005, 3, 30)    # Day number 1916 (0 based), day 89 in file
    time_period = pd.date_range(start, end)
    extent = [slice(15, 25), slice(25, 45)]
    data = dataset.retrieve(time_period, extent)
    assert_tuple_equal((58, 10, 20), data.shape)
    assert_array_equal(data[:, 0, 0], np.arange(1858, 1916))
    assert_array_equal(data[:, 5, 5], np.zeros(58))
def test_nodes_at_link():
    """Test getting nodes at links from a voronoi."""
    converter = VoronoiConverter(Voronoi(POINTS))
    nodes_at_link = converter.get_nodes_at_link()
    assert_tuple_equal(nodes_at_link.shape, (11, 2))
    # Node ids are valid and every node appears in at least one link.
    assert_true(np.all(nodes_at_link >= 0))
    assert_true(np.all(nodes_at_link < 10))
    assert_array_equal(np.unique(nodes_at_link),
                       [0, 1, 2, 3, 4, 5, 6, 7, 8, 9])
def test_process(self):
    """Rasterizing the polygons yields the known values, counts, and shape."""
    raster = utils.polygons_to_raster(self.testfile, "GeoID", **self.kwargs)
    nt.assert_true(isinstance(raster, arcpy.Raster))
    array = utils.rasters_to_arrays(raster, squeeze=True)
    # Clean up the intermediate raster before the array assertions.
    arcpy.management.Delete(raster)
    flat_arr = array.flatten()
    bins = numpy.bincount(flat_arr[flat_arr > 0])
    nptest.assert_array_almost_equal(numpy.unique(array), self.known_values)
    nptest.assert_array_almost_equal(bins[bins > 0], self.known_counts)
    nt.assert_tuple_equal(array.shape, self.known_shape)
def test_new_success():
    """A cache miss runs the wrapped function and stores the result."""
    tmp = mkdtemp()
    warehouse = Warehouse(tmp)
    url = 'http://a.b/c'

    @cache(tmp)
    def get(_):
        return 88

    observed_response = get(url)
    n.assert_equal(observed_response, 88)
    # The warehouse keys on the argument tuple.
    n.assert_tuple_equal(warehouse[(url,)], (None, 88))
def test_node_at_vertex():
    """Test mapping voronoi vertices to nodes."""
    voronoi = Voronoi(POINTS)
    converter = VoronoiConverter(voronoi)
    node_at_vertex = converter.get_node_at_vertex()
    assert_tuple_equal(node_at_vertex.shape, (len(voronoi.vertices), ))
    # Exactly ten vertices map to real nodes; the rest carry -1.
    assert_equal(sum(node_at_vertex >= 0), 10)
    mapped = node_at_vertex[node_at_vertex >= 0]
    mapped.sort()
    assert_array_equal(mapped, [0, 1, 2, 3, 4, 5, 6, 7, 8, 9])
def test_complex_matlab_blobs(self):
    """De-serialize assorted MATLAB blob types fetched ordered by id."""
    blobs = Blob().fetch('blob', order_by='id')
    # 'character string'
    assert_equal(blobs[0][0], 'character string')
    # 1D vector 1:15:180
    assert_true(np.array_equal(blobs[1][0], np.r_[1:180:15]))
    # cell array of strings
    assert_list_equal([r[0] for r in blobs[2]], ['string1', 'string2'])
    # struct array fields
    assert_list_equal([r[0, 0] for r in blobs[3]['a'][0]], [1, 2])
    assert_tuple_equal(blobs[3]['b'][0, 0]['c'][0, 0].shape, (3, 3))
    # 3D double array reshape(1:24, [2,3,4])
    assert_true(np.array_equal(blobs[4],
                               np.r_[1:25].reshape((2, 3, 4), order='F')))
    assert_true(blobs[4].dtype == 'float64')
    # same values as uint8
    assert_true(np.array_equal(blobs[5],
                               np.r_[1:25].reshape((2, 3, 4), order='F')))
    assert_true(blobs[5].dtype == 'uint8')
    # complex array keeps shape and dtype
    assert_tuple_equal(blobs[6].shape, (2, 3, 4))
    assert_true(blobs[6].dtype == 'complex128')
def test_complex_matlab_blobs(self):
    """De-serialize assorted MATLAB blob types via the fetch.order_by API."""
    blobs = Blob().fetch.order_by('id')['blob']
    assert_equal(blobs[0][0], 'character string')
    assert_true(np.array_equal(blobs[1][0], np.r_[1:180:15]))
    assert_list_equal([r[0] for r in blobs[2]], ['string1', 'string2'])
    assert_list_equal([r[0, 0] for r in blobs[3]['a'][0]], [1, 2])
    assert_tuple_equal(blobs[3]['b'][0, 0]['c'][0, 0].shape, (3, 3))
    # Numeric 3D arrays: float64 then uint8, both reshape(1:24, [2,3,4]).
    expected = np.r_[1:25].reshape((2, 3, 4), order='F')
    assert_true(np.array_equal(blobs[4], expected))
    assert_true(blobs[4].dtype == 'float64')
    assert_true(np.array_equal(blobs[5], expected))
    assert_true(blobs[5].dtype == 'uint8')
    assert_tuple_equal(blobs[6].shape, (2, 3, 4))
    assert_true(blobs[6].dtype == 'complex128')
def test_old_success():
    """A cache hit returns the stored value without calling the function."""
    tmp = mkdtemp()
    warehouse = Warehouse(tmp)
    url = 'http://a.b/c'
    warehouse[url] = (None, 88)

    @cache(tmp)
    def get(_):
        raise AssertionError('This should not run.')

    observed_response = get(url)
    n.assert_equal(observed_response, 88)
    n.assert_tuple_equal(warehouse[(url,)], (None, 88))
def test_pindex_conersions():
    # NOTE: name typo ("conersions") kept — renaming would change test discovery.
    """Round-trip band/year pairs through the pindex conversion helpers."""
    nt.assert_equal(common.get_pindex_from_band("BE-1234567", "2014"),
                    "BE-12345672014")
    nt.assert_equal(common.get_pindex_from_band("87-CUST-2222", "2014"),
                    "87-CUST-22222014")
    nt.assert_equal(common.get_pindex_from_band_string("BE-1234567/2014"),
                    "BE-12345672014")
    nt.assert_equal(common.get_pindex_from_band_string("BE-1234567 / 2014"),
                    "BE-12345672014")
    nt.assert_tuple_equal(common.get_band_from_pindex("BE-12345672014"),
                          ("BE-1234567", "2014"))
    nt.assert_tuple_equal(common.get_band_from_pindex("87-CUST-22222014"),
                          ("87-CUST-2222", "2014"))
def test_complex_matlab_blobs():
    """Test correct de-serialization of various blob types."""
    blobs = Blob().fetch('blob', order_by='id')
    assert_equal(blobs[0][0], 'character string')
    assert_true(np.array_equal(blobs[1][0], np.r_[1:180:15]))
    assert_list_equal([r[0] for r in blobs[2]], ['string1', 'string2'])
    assert_list_equal([r[0, 0] for r in blobs[3]['a'][0]], [1, 2])
    assert_tuple_equal(blobs[3]['b'][0, 0]['c'][0, 0].shape, (3, 3))
    # float64 and uint8 versions of reshape(1:24, [2,3,4]).
    reference = np.r_[1:25].reshape((2, 3, 4), order='F')
    assert_true(np.array_equal(blobs[4], reference))
    assert_true(blobs[4].dtype == 'float64')
    assert_true(np.array_equal(blobs[5], reference))
    assert_true(blobs[5].dtype == 'uint8')
    assert_tuple_equal(blobs[6].shape, (2, 3, 4))
    assert_true(blobs[6].dtype == 'complex128')
def test_clip_dem_to_zones():
    """Clipping a DEM to a zone raster yields a Raster of the zone's shape."""
    demfile = resource_filename("tidegates.testing.clip_dem_to_zones",
                                'test_dem.tif')
    zonefile = resource_filename("tidegates.testing.clip_dem_to_zones",
                                 "test_zones_raster_small.tif")
    raster = utils.clip_dem_to_zones(demfile, zonefile)

    zone_r = utils.load_data(zonefile, 'raster')
    arrays = utils.rasters_to_arrays(raster, zone_r)
    dem_a, zone_a = arrays[0], arrays[1]
    arcpy.management.Delete(raster)

    nt.assert_true(isinstance(raster, arcpy.Raster))
    # Removed the unused `known_shape = (146, 172)` local; the shape check
    # compares the clipped DEM against the zone raster directly.
    nt.assert_tuple_equal(dem_a.shape, zone_a.shape)
def test_save():
    """A grid saved with save_grid and reloaded matches the original."""
    # Build a simple-ish grid with a ramped elevation field and D8 flow.
    mg1 = RasterModelGrid(10, 10, 2.)
    z = mg1.add_zeros('node', 'topographic__elevation')
    z += mg1.node_x.copy()
    fa = FlowAccumulator(mg1, flow_director='D8')
    fa.run_one_step()

    save_grid(mg1, 'testsavedgrid.grid')
    mg2 = load_grid('testsavedgrid.grid')
    os.remove('testsavedgrid.grid')

    assert_tuple_equal(mg1.shape, mg2.shape)
    assert_tuple_equal((mg1.dy, mg1.dx), (mg2.dy, mg2.dx))
    assert_array_equal(mg1.status_at_node, mg2.status_at_node)
    for name in mg1.at_node:
        assert_array_equal(mg1.at_node[name], mg2.at_node[name])
def test_2_components_dict_interface():
    """A two-entry Palette behaves like the equivalent dict."""
    palette = Palette(one=Sample1, two=Sample2)
    assert_dict_equal(dict(one=Sample1, two=Sample2), palette)
    assert_equal(len(palette), 2)
    keys = palette.keys()
    keys.sort()
    assert_list_equal(['one', 'two'], keys)
    values = palette.values()
    assert_equal(2, len(values))
    assert_true(Sample1 in values and Sample2 in values)
    items = palette.items()
    items.sort()
    assert_equal(2, len(items))
    assert_tuple_equal(('one', Sample1), items[0])
    assert_tuple_equal(('two', Sample2), items[1])
def test_notifyMe__exception_thrown_and_retry_fail():
    """notifyMe retries once after a failure and reports both attempts.

    The first sendMessage reports the temporary failure; the second reports
    success after the retry.
    """
    subject = "some message subject"
    owner_id = '12434'
    user_profile = {'ownerId': owner_id}
    # Dropped the unused `as mocked_get_user_profile` alias and the
    # duplicated first-call assertions present in the original.
    with patch.object(syn, "sendMessage") as mocked_send_message,\
            patch.object(syn, "getUserProfile", return_value=user_profile):
        mocked_func = MagicMock(side_effect=[Exception('first time fails'),
                                             'second time is Fine'])

        @notifyMe(syn, messageSubject=subject, retries=1)
        def test_function():
            mocked_func()

        test_function()
        assert_equal(2, mocked_send_message.call_count)

        # call_args_list is a list of tuples, each in the form (args, kwargs).
        first_call_args = mocked_send_message.call_args_list[0][0]
        first_call_kwargs = mocked_send_message.call_args_list[0][1]
        second_call_args = mocked_send_message.call_args_list[1][0]
        second_call_kwargs = mocked_send_message.call_args_list[1][1]

        assert_tuple_equal(([owner_id], subject), first_call_args)
        assert_equal(1, len(first_call_kwargs))
        assert_in('Encountered a temporary Failure during upload',
                  first_call_kwargs['messageBody'])

        assert_tuple_equal(([owner_id], subject), second_call_args)
        assert_equal(1, len(second_call_kwargs))
        assert_equal("Call to test_function completed successfully!",
                     second_call_kwargs['messageBody'])
def test_notifyMe__exception_thrown_and_retry_fail():
    """notifyMe retries once after a failure and sends two status messages."""
    subject = "some message subject"
    owner_id = '12434'
    user_profile = {'ownerId': owner_id}
    with patch.object(syn, "sendMessage") as mocked_send_message,\
            patch.object(syn, "getUserProfile", return_value=user_profile):
        # First call raises; the retry succeeds.
        mocked_func = MagicMock(side_effect=[Exception('first time fails'),
                                             'second time is Fine'])

        @notifyMe(syn, messageSubject=subject, retries=1)
        def test_function():
            mocked_func()

        test_function()
        assert_equal(2, mocked_send_message.call_count)

        # call_args_list is a list of tuples, each tuple in the form (args,kwargs)
        calls = mocked_send_message.call_args_list
        first_call_args, first_call_kwargs = calls[0][0], calls[0][1]
        second_call_args, second_call_kwargs = calls[1][0], calls[1][1]

        assert_tuple_equal(([owner_id], subject), first_call_args)
        assert_in('Encountered a temporary Failure during upload',
                  first_call_kwargs['messageBody'])
        assert_tuple_equal(([owner_id], subject), first_call_args)
        assert_equal(1, len(first_call_kwargs))
        assert_in('Encountered a temporary Failure during upload',
                  first_call_kwargs['messageBody'])
        assert_tuple_equal(([owner_id], subject), second_call_args)
        assert_equal(1, len(second_call_kwargs))
        assert_equal("Call to test_function completed successfully!",
                     second_call_kwargs['messageBody'])
def test_Mat1f_ndarray_2d_dims():
    """setMat1f/getMat1f round-trip 2-D float32 arrays without copying."""
    for rows in range(1, 11):
        for cols in range(1, 11):
            x = (np.random.rand(rows, cols) * 100.).astype('float32')
            test_dummy = elm.Dummy()
            test_dummy.setMat1f(x)
            y = test_dummy.getMat1f()
            assert_equal(x.ndim, 2)
            assert_tuple_equal(x.shape, (rows, cols))
            assert_is_instance(y, np.ndarray)
            assert_greater(y.size, 0)
            assert_equal(y.shape, x.shape)
            assert_true(np.all(x == y))
            # Mutating x is visible through y: same underlying object.
            x += 10
            assert_true(np.all(x == y))
            assert_true(x is y)
def test_pickle():
    """A grid pickled to disk and unpickled matches the original."""
    # Build a simple-ish grid with a ramped elevation field and D8 flow.
    mg1 = RasterModelGrid(10, 10, 2.)
    z = mg1.add_zeros('node', 'topographic__elevation')
    z += mg1.node_x.copy()
    fa = FlowAccumulator(mg1, flow_director='D8')
    fa.run_one_step()

    # Save it with pickle.
    with open('testsavedgrid.grid', 'wb') as f:
        pickle.dump(mg1, f)

    # Load it with pickle.
    with open('testsavedgrid.grid', 'rb') as f:
        mg2 = pickle.load(f)
    os.remove('testsavedgrid.grid')

    assert_tuple_equal(mg1.shape, mg2.shape)
    assert_tuple_equal((mg1.dy, mg1.dx), (mg2.dy, mg2.dx))
    assert_array_equal(mg1.status_at_node, mg2.status_at_node)
    for name in mg1.at_node:
        assert_array_equal(mg1.at_node[name], mg2.at_node[name])
def test_complex_matlab_squeeze():
    """Test correct de-serialization of various blob types with squeeze=True."""
    # 'simple string' 'character string'
    blob = (Blob & "id=1").fetch1("blob", squeeze=True)
    assert_equal(blob, "character string")

    # '1D vector' 1:15:180
    blob = (Blob & "id=2").fetch1("blob", squeeze=True)
    assert_array_equal(blob, np.r_[1:180:15])

    # 'string array' {'string1' 'string2'}
    blob = (Blob & "id=3").fetch1("blob", squeeze=True)
    assert_true(isinstance(blob, dj.MatCell))
    assert_array_equal(blob, np.array(["string1", "string2"]))

    # 'struct array' struct('a', {1,2}, 'b', {struct('c', magic(3)),
    # struct('C', magic(5))})
    blob = (Blob & "id=4").fetch1("blob", squeeze=True)
    assert_true(isinstance(blob, dj.MatStruct))
    assert_tuple_equal(blob.dtype.names, ("a", "b"))
    assert_array_equal(blob.a, np.array([1.0, 2, ]))
    assert_true(isinstance(blob[1].b, dj.MatStruct))
    assert_tuple_equal(blob[1].b.C.item().shape, (5, 5))

    # '3D double array' reshape(1:24, [2,3,4])
    blob = (Blob & "id=5").fetch1("blob", squeeze=True)
    assert_true(np.array_equal(blob, np.r_[1:25].reshape((2, 3, 4), order="F")))
    assert_true(blob.dtype == "float64")

    # reshape(uint8(1:24), [2,3,4])
    blob = (Blob & "id=6").fetch1("blob", squeeze=True)
    assert_true(np.array_equal(blob, np.r_[1:25].reshape((2, 3, 4), order="F")))
    assert_true(blob.dtype == "uint8")

    # fftn(reshape(1:24, [2,3,4]))
    blob = (Blob & "id=7").fetch1("blob", squeeze=True)
    assert_tuple_equal(blob.shape, (2, 3, 4))
    assert_true(blob.dtype == "complex128")
def test_complex_matlab_blobs():
    """Test correct de-serialization of various blob types."""
    blobs = Blob().fetch('blob', order_by='KEY')

    # 'simple string' 'character string'
    blob = blobs[0]
    assert_equal(blob[0], 'character string')

    # '1D vector' 1:15:180
    blob = blobs[1]
    assert_array_equal(blob, np.r_[1:180:15][None, :])
    assert_array_equal(blob, unpack(pack(blob)))

    # 'string array' {'string1' 'string2'}
    blob = blobs[2]
    assert_true(isinstance(blob, dj.MatCell))
    assert_array_equal(blob, np.array([['string1', 'string2']]))
    assert_array_equal(blob, unpack(pack(blob)))

    # 'struct array' struct('a', {1,2}, 'b', {struct('c', magic(3)),
    # struct('C', magic(5))})
    blob = blobs[3]
    assert_true(isinstance(blob, dj.MatStruct))
    assert_tuple_equal(blob.dtype.names, ('a', 'b'))
    assert_array_equal(blob.a[0, 0], np.array([[1.]]))
    assert_array_equal(blob.a[0, 1], np.array([[2.]]))
    assert_true(isinstance(blob.b[0, 1], dj.MatStruct))
    assert_tuple_equal(blob.b[0, 1].C[0, 0].shape, (5, 5))
    b = unpack(pack(blob))
    assert_array_equal(b[0, 0].b[0, 0].c, blob[0, 0].b[0, 0].c)
    assert_array_equal(b[0, 1].b[0, 0].C, blob[0, 1].b[0, 0].C)

    # '3D double array' reshape(1:24, [2,3,4])
    blob = blobs[4]
    assert_array_equal(blob, np.r_[1:25].reshape((2, 3, 4), order='F'))
    assert_true(blob.dtype == 'float64')
    assert_array_equal(blob, unpack(pack(blob)))

    # reshape(uint8(1:24), [2,3,4])
    blob = blobs[5]
    assert_true(np.array_equal(blob, np.r_[1:25].reshape((2, 3, 4), order='F')))
    assert_true(blob.dtype == 'uint8')
    assert_array_equal(blob, unpack(pack(blob)))

    # fftn(reshape(1:24, [2,3,4]))
    blob = blobs[6]
    assert_tuple_equal(blob.shape, (2, 3, 4))
    assert_true(blob.dtype == 'complex128')
    assert_array_equal(blob, unpack(pack(blob)))
def test_complex_matlab_squeeze():
    """Test correct de-serialization of various blob types with squeeze=True."""
    # 'simple string' 'character string'
    blob = (Blob & 'id=1').fetch1('blob', squeeze=True)
    assert_equal(blob, 'character string')

    # '1D vector' 1:15:180
    blob = (Blob & 'id=2').fetch1('blob', squeeze=True)
    assert_array_equal(blob, np.r_[1:180:15])

    # 'string array' {'string1' 'string2'}
    blob = (Blob & 'id=3').fetch1('blob', squeeze=True)
    assert_true(isinstance(blob, dj.MatCell))
    assert_array_equal(blob, np.array(['string1', 'string2']))

    # 'struct array' struct('a', {1,2}, 'b', {struct('c', magic(3)),
    # struct('C', magic(5))})
    blob = (Blob & 'id=4').fetch1('blob', squeeze=True)
    assert_true(isinstance(blob, dj.MatStruct))
    assert_tuple_equal(blob.dtype.names, ('a', 'b'))
    assert_array_equal(blob.a, np.array([1., 2, ]))
    assert_true(isinstance(blob[1].b, dj.MatStruct))
    assert_tuple_equal(blob[1].b.C.item().shape, (5, 5))

    # '3D double array' reshape(1:24, [2,3,4])
    blob = (Blob & 'id=5').fetch1('blob', squeeze=True)
    assert_true(np.array_equal(blob, np.r_[1:25].reshape((2, 3, 4), order='F')))
    assert_true(blob.dtype == 'float64')

    # reshape(uint8(1:24), [2,3,4])
    blob = (Blob & 'id=6').fetch1('blob', squeeze=True)
    assert_true(np.array_equal(blob, np.r_[1:25].reshape((2, 3, 4), order='F')))
    assert_true(blob.dtype == 'uint8')

    # fftn(reshape(1:24, [2,3,4]))
    blob = (Blob & 'id=7').fetch1('blob', squeeze=True)
    assert_tuple_equal(blob.shape, (2, 3, 4))
    assert_true(blob.dtype == 'complex128')
def test_parse_authorization_header():
    """A Basic auth header decodes back to the original credential pair."""
    credentials = ('foo', 'bar')
    # Build "Basic <base64(user:pass)>" by hand.
    encoded = base64.b64encode(':'.join(credentials).encode('utf-8'))
    header = 'Basic ' + encoded.decode('utf-8')
    decoded = services.BaseService._parse_authorization_header(header)
    assert_tuple_equal(decoded, credentials)