def test_rosconsole__logging_format():
    """Check rosconsole's log line format for module, function and method
    scope log calls (one captured line per location)."""
    this_file = os.path.abspath(__file__)
    # When running from a .pyc cache file, point back at the .py source so
    # the file path embedded in the log output matches this test's path.
    base, ext = os.path.splitext(this_file)
    if ext == '.pyc':
        this_file = base + '.py'
    # Map each logging location to the function name rosconsole should report.
    reported_function = {
        'module': '<module>',
        'function': 'logging_on_function',
        'method': 'LoggingOnClass.__init__',
    }
    for i, loc in enumerate(['module', 'function', 'method']):
        if loc not in reported_function:
            raise ValueError
        function = reported_function[loc]
        log_out = ' '.join([
            'INFO',
            'on ' + loc,
            r'[0-9]*\.[0-9]*',
            r'[0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}:[0-9]{2}',
            '[0-9]*',
            'rosout',
            re.escape(this_file),
            '[0-9]*',
            function,
            # depending if rospy.get_name() is available
            '(/unnamed|<unknown_node_name>)',
            r'[0-9]*\.[0-9]*',
            r'[0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}:[0-9]{2}',
        ])
        assert_regexp_matches(lout.getvalue().splitlines()[i], log_out)
def test_compile_and_call_id():
    # Alice should be able to compile a sophia contract with an identity
    # function into bytecode and call it.
    test_settings = settings["test_compile_and_call_id"]
    (root_dir, node, api) = setup_node(test_settings, "alice")

    # Read the contract source. Use a context manager so the file handle is
    # always closed (the original leaked it), and os.path.join instead of
    # string concatenation for the path.
    dir_path = os.path.dirname(__file__)
    with open(os.path.join(dir_path, "identity.aes"), "r") as contract_file:
        contract_string = contract_file.read()

    # Compile contract to bytecode
    contract = Contract(contract_string, "")
    compilation_result = api.compile_contract(contract)
    assert_regexp_matches(compilation_result.bytecode, '0x.*')

    # Call contract bytecode
    call_input = ContractCallInput("sophia", compilation_result.bytecode,
                                   "main", "42")
    call_result = api.call_contract(call_input)
    assert_regexp_matches(call_result.out, '0x.*')

    # stop node
    common.stop_node(node)
    shutil.rmtree(root_dir)
def test_inhomogenous_3():
    # Build a parameter tree, then graft an extra child 'z4' whose value is
    # itself a node (not a leaf amount), making the vectorial bracket lookup
    # below fail with a descriptive error.
    parameters = ParameterNode(directory_path=LOCAL_DIR)
    parameters.rate.couple.tenant.add_child(
        'z4',
        ParameterNode('toto', data={
            'amount': {
                'values': {
                    "2015-01-01": {'value': 550},
                    "2016-01-01": {'value': 600}
                }
            }
        }))
    P = parameters.rate('2015-01-01')
    zone = np.asarray(['z1', 'z2', 'z2', 'z1'])
    try:
        # Fancy-indexing with a zone array must reject the node-valued 'z4'.
        P.couple.tenant[zone]
    except ValueError as e:
        # The error message should single out 'z4' as a node while the leaf
        # siblings (z1/z2/z3) are explicitly not nodes.
        assert_in("'rate.couple.tenant.z4' is a node", get_message(e))
        assert_regexp_matches(
            get_message(e),
            r"'rate.couple.tenant.z(1|2|3)' is not")
        # Re-raise so the surrounding harness still observes the ValueError
        # (presumably a @raises-style decorator on this test — TODO confirm).
        raise
def check_release(package, data):
    """Validate one release entry from the main package repository.

    Checks that forbidden keys are absent, that URL-valued keys look like
    URLs, that the sublime_text version selector is well-formed, and that
    platforms is a known platform name (or list of them).
    """
    for key in data.keys():
        assert_not_in(key, ['version', 'date', 'url'], 'The version, date and '
                      'url keys should not be used in the main repository since a pull '
                      'request would be necessary for every release')
        assert_in(key, ['details', 'sublime_text', 'platforms'])
        if key in ['details', 'url']:
            assert_regexp_matches(data[key], '^https?://')
        if key == 'sublime_text':
            # Raw string: '\*' and '\d' are invalid escape sequences in a
            # plain literal (SyntaxWarning on Python 3.12+, error later).
            assert_regexp_matches(data[key], r'^(\*|<=?\d{4}|>=?\d{4})$')
        if key == 'platforms':
            assert_in(type(data[key]), [str, list])
            if type(data[key]) == str:
                assert_in(data[key], ['*', 'osx', 'linux', 'windows'])
            else:
                for platform in data[key]:
                    assert_in(platform, ['*', 'osx', 'linux', 'windows'])
    # Every main-repository release must point at its own details page.
    assert_in('details', data, 'A release must have a "details" key if it is in '
              'the main repository. For custom releases, a custom repository.json '
              'file must be hosted elsewhere.')
def test_rosconsole__logging_format():
    """Check rosconsole's log line format for module, function and method
    scope log calls (one captured line per location)."""
    this_file = os.path.abspath(__file__)
    # this is necessary to avoid test fails because of .pyc cache file
    base, ext = os.path.splitext(this_file)
    if ext == '.pyc':
        this_file = base + '.py'
    for i, loc in enumerate(['module', 'function', 'method']):
        if loc == 'module':
            function = '<module>'
        elif loc == 'function':
            function = 'logging_on_function'
        elif loc == 'method':
            function = 'LoggingOnClass.__init__'
        else:
            raise ValueError
        # Raw strings below: '\.' is an invalid escape sequence in a plain
        # literal (SyntaxWarning on Python 3.12+, error in a future release).
        log_out = ' '.join([
            'INFO',
            'on ' + loc,
            r'[0-9]*\.[0-9]*',
            '[0-9]*',
            'rosout',
            this_file,
            '[0-9]*',
            function,
            '/unnamed',
            r'[0-9]*\.[0-9]*',
        ])
        assert_regexp_matches(f.getvalue().splitlines()[i], log_out)
def test_merge_request_detail_view(self):
    """Merge request detail page shows the revision row, merge
    instructions, a relative timestamp and the 'ok to merge' notice."""
    resp, mr_num = self._request_merge()
    assert 'wants to merge' in resp, resp.showbrowser()
    assert 'Improve documentation' in resp, resp.showbrowser()
    # exactly one revision row, with rev and browse links for the head commit
    rev_rows = resp.html.findAll('tr', attrs={'class': 'rev'})
    assert_equal(len(rev_rows), 1)
    rev_anchors = rev_rows[0].findAll('a', attrs={'class': 'rev'})
    browse_anchors = rev_rows[0].findAll('a', attrs={'class': 'browse'})
    commit_id = self.forked_repo.get_heads()[0]['object_id']
    assert_equal(rev_anchors[0].get('href'), '/p/test2/code/ci/%s/' % commit_id)
    assert_equal(rev_anchors[0].getText(), '[%s]' % commit_id[:6])
    assert_equal(browse_anchors[0].get('href'),
                 '/p/test2/code/ci/%s/tree' % commit_id)
    assert_equal(browse_anchors[0].getText(), 'Tree')
    # the textarea carries the copy-paste merge instructions
    instructions = resp.html.findAll('textarea')[0].getText()
    for expected_line in ('git checkout master',
                          'git fetch /srv/git/p/test2/code master',
                          'git merge {}'.format(commit_id)):
        assert_in(expected_line, instructions)
    assert_regexp_matches(str(resp), r'[0-9]+ seconds? ago')
    merge_help = resp.html.find('div', {'class': 'merge-help-text merge-ok'})
    assert merge_help
    assert_in(
        'Merge request has no conflicts. You can merge automatically.',
        merge_help.getText())
def test_dump_bash_set_root_device(self):
    """dump_bash prints root_disk_list=sdb for the pod example settings."""
    i = Inventory('../config/inventory/pod_example_settings.yaml')
    out = StringIO()
    # Capture stdout, and restore it in a finally block so a failure inside
    # dump_bash does not leave sys.stdout redirected for later tests
    # (the original never restored it).
    saved_stdout = sys.stdout
    sys.stdout = out
    try:
        i.dump_bash()
    finally:
        sys.stdout = saved_stdout
    output = out.getvalue().strip()
    assert_regexp_matches(output, 'root_disk_list=sdb')
def test_dump_bash_default(self):
    """dump_bash prints root_disk_list=sda for the intel pod2 settings."""
    i = Inventory('../config/inventory/intel_pod2_settings.yaml')
    out = StringIO()
    # Capture stdout, and restore it in a finally block so a failure inside
    # dump_bash does not leave sys.stdout redirected for later tests
    # (the original never restored it).
    saved_stdout = sys.stdout
    sys.stdout = out
    try:
        i.dump_bash()
    finally:
        sys.stdout = saved_stdout
    output = out.getvalue().strip()
    assert_regexp_matches(output, 'root_disk_list=sda')
def test_copy_verse_range(out):
    '''should copy reference content for verse range'''
    yvs.main('111/psa.23.1-2')
    copied = out.getvalue()
    # verses 1-2 are present ...
    for expected in ('Lorem', 'nunc nulla'):
        assert_regexp_matches(copied, expected)
    # ... while verse 3 content is excluded
    assert_not_regexp_matches(copied, 'fermentum')
def test_local_mode():
    """Mailer in local mode writes the collected trace to its output."""
    mailer = create_mailer({"mode": "local"})
    # stub out core discovery and trace extraction
    mailer.find_core = lambda: __file__
    mailer.get_trace = lambda _: "some traces"
    sink = StringIO()
    mailer.out = sink
    mailer.run()
    assert_regexp_matches(sink.getvalue(), "some traces")
def test_display_addr_refs():
    """should display table of address references"""
    sim = Simulator()
    refs = sim.get_addr_refs(
        word_addrs=WORD_ADDRS, num_addr_bits=8,
        num_tag_bits=5, num_index_bits=2, num_offset_bits=1)
    apply_cache_statuses_to_refs(['miss', 'miss', 'HIT', 'miss'], refs)
    out = io.StringIO()
    with contextlib.redirect_stdout(out):
        sim.display_addr_refs(refs, table_width=TABLE_WIDTH)
    table_output = out.getvalue()
    num_cols = 6
    col_width = TABLE_WIDTH // num_cols
    # header row followed by the full-width divider line
    header_cells = ['WordAddr', 'BinAddr', 'Tag', 'Index', 'Offset', 'Hit/Miss']
    header_pattern = (
        r'\s*'.join(cell.rjust(col_width) for cell in header_cells) +
        '\n' + ('-' * TABLE_WIDTH))
    nose.assert_regexp_matches(table_output, header_pattern)
    # a data row for address 253, which was a HIT
    row_cells = ['253', '1111 1101', '11111', '10', '1', 'HIT']
    row_pattern = r'\s*'.join(cell.rjust(col_width) for cell in row_cells)
    nose.assert_regexp_matches(table_output, row_pattern)
def test_tab_about(self):
    """About page names a known device and shows a serial in a known range."""
    about_page = self.app.get("/config/about/")
    assert_equal(about_page.status_int, 200)
    assert_regexp_matches(
        about_page.body,
        r"|".join(self.AVAILABLE_DEVICES),
        "This test suite is not adjusted for this device.",
    )
    # the serial number is rendered as a bare integer in a table cell
    sn_match = re.search(r"<td>(\d+)</td>", about_page.body)
    assert_true(sn_match)
    try:
        sn = int(sn_match.group(1))
    except ValueError:
        raise AssertionError("Router serial number is not integer.")
    # should work on routers from first production Turris 1.0 till new Turris 1.1
    in_range = any(first < sn < last for first, last in self.AVAILABLE_SERIALS)
    assert_true(
        in_range,
        "Serial %d not in range %s " % (
            sn, ", ".join([repr(e) for e in self.AVAILABLE_SERIALS])),
    )
def test_inherit():
    """_shared/_inherit semantics: which apps pick up shared config keys."""
    # _shared is not included in apps that don't explicitly define it
    data = dict(cc.parse('test', CWD))
    for key in data:
        nt.assert_regexp_matches(key, r'^test.*')
    for app in ['test/app2']:
        nt.assert_equal(data[app], {'key1': 'val1'})
    # _shared should not get included in apps that define _inherit
    for app in ('test', 'test/app1', 'test/app3'):
        nt.assert_equal(data[app], {})
    # table of expected merged config per app
    expectations = {
        'test/app4': {'key2': 'val2'},
        'test/app5': {'key1': 'val1', 'key2': 'val2'},
        'test/app6': {'key1': 'val11'},
        'test/app7': {'key1': 'val11', 'key2': 'val2'},
        'test/app8': {'key1': 'val1', 'key2': 'val22'},
        'test/app9': {'key1': 'val1', 'key2': 'val222'},
        'test/app20': {'key': 'value'},
        'test/app21': {'key': 'value', 'key1': 'val1'},
        'test/app22': {'key1': 'val1'},
    }
    for app, expected in expectations.items():
        nt.assert_dict_equal(data[app], expected)
    data = dict(cc.parse('test-ns2', CWD))
    nt.assert_dict_equal(data['test-ns2'], {'key1': 'val1'})
def test_registration_code(self):
    """Registration-code AJAX call succeeds and returns a non-empty code."""
    res = self.app.get(
        "/config/about/ajax?action=registration_code", headers=XHR_HEADERS)
    payload = res.json
    assert_true(payload['success'])
    # check that code is not empty: eight uppercase hex digits
    code = payload['data']
    assert_regexp_matches(code, r"[0-9A-F]{8}")
def test_whitespace_words():
    """should handle spaces appropriately"""
    ref_content = yvs.get_copied_ref('111/psa.23')
    checks = (
        ('adipiscing elit.', 'should respect content consisting of spaces'),
        ('consectetur adipiscing', 'should collapse consecutive spaces'),
    )
    for pattern, message in checks:
        nose.assert_regexp_matches(ref_content, pattern, message)
def test_get_links(mock_retrieve):
    """ How are search hits links split from the results page """
    query = "Finalize object in Scala"
    # these were the answers on August 5 2015
    mock_retrieve.return_value = read_fixed_data('search_hits_scala.html')
    links = pbs.lookup.get_search_hits(query).links
    nt.assert_equal(10, len(links))
    link_pattern = (
        r'/url\?q='                       # matched literally
        r'http://stackoverflow\.com'      # matched literally with escaped dot
        r'/questions/\d+'                 # question id
        r'/[a-z\-]+'                      # question title
        r'&sa=U&ved=\w{40}&usg=\S{34}'    # params: two hashes
    )
    for link in links:
        nt.assert_regexp_matches(link, link_pattern)
    expected_titles = [
        'how-to-write-a-class-destructor-in-scala',
        'when-is-the-finalize-method-called-in-java',
        'is-there-a-destructor-for-java',
        'java-memory-leak-destroy-finalize-object',
        'what-guarantees-does-java-scala-make-about-garbage-collection',
        'what-is-the-best-way-to-clean-up-an-object-in-java',
        'what-is-the-cause-of-this-strange-scala-memory-leak',
        'java-executing-a-method-when-objects-scope-ends',
        'luajava-call-methods-on-lua-object-from-java',
        'how-to-prevent-an-object-from-getting-garbage-collected',
    ]
    # each link should embed its expected question title, in order
    for link, title in zip(links, expected_titles):
        nt.assert_true(title in link)
def test_non_sysadmin_changes_related_items_featured_field_fails(self):
    '''Non-sysadmins cannot change featured field'''
    # Act as the non-sysadmin user 'annafan' (username masked in fixture).
    context = {
        'model': model,
        'user': '******',
        'session': model.Session
    }
    data_dict = {
        'title': 'Title',
        'description': 'Description',
        'type': 'visualization',
        'url': 'http://ckan.org',
        'image_url': 'http://ckan.org/files/2012/03/ckanlogored.png',
    }
    # Create the related item as annafan
    result = logic.get_action('related_create')(context, data_dict)
    # Try to change it to a featured item
    result['featured'] = 1
    # NOTE(review): if related_update does NOT raise, the test silently
    # passes — presumably intended to fail in that case; confirm.
    try:
        logic.get_action('related_update')(context, result)
    except logic.NotAuthorized, e:  # Python 2 except syntax (file predates py3)
        # Check it's the correct authorization error
        assert_regexp_matches(str(e), 'featured')
def test_progress_percentage(cleanup_tempfiles):
    """ProgressPercentage reports cumulative transferred bytes as a
    percentage of the 20-byte dummy file."""

    class ProgressCallbackInvoker(BaseSubscriber):
        """A back-compat wrapper to invoke a provided callback via a subscriber

        :param callback: A callable that takes a single positional argument
            for how many bytes were transferred.
        """

        def __init__(self, callback):
            self._callback = callback

        def on_progress(self, bytes_transferred, **kwargs):
            self._callback(bytes_transferred)

    # create a dummy 20-byte file; write via a context manager so the
    # handle is closed (the original `open(...).write(...)` leaked it)
    tf = NamedTemporaryFile(delete=False, suffix='.tgz')
    cleanup_tempfiles.append(tf.name)
    with open(tf.name, 'w') as fh:
        fh.write('some content here...')
    out = StringIO()
    # instantiate ProgressReporter
    callback = ProgressPercentage(tf.name, out=out)
    subscriber = ProgressCallbackInvoker(callback)
    # 1 byte -> 5%. Raw strings below: '\.' and '\(' are invalid escape
    # sequences in plain literals (SyntaxWarning on Python 3.12+).
    time.sleep(0.001)
    subscriber.on_progress(bytes_transferred=1)
    assert_regexp_matches(out.getvalue().strip(),
                          r'.*\.tgz 1 / 20\.0 \(5\.00%\)')
    # 11 (1+10) bytes -> 55%
    subscriber.on_progress(bytes_transferred=10)
    assert_regexp_matches(out.getvalue().strip(),
                          r'.*\.tgz 11 / 20\.0 \(55\.00%\)')
def test_img_metadata():
    "should have image metadata"
    list_img_path = get_tool_img_path()
    assert_true(len(list_img_path) > 0)
    # compile once outside the loop; same pattern for every image
    version_pattern = re.compile(r"\b\d+(?:\.[\d\w-]+)*\b")
    for img_filename in list_img_path:
        tool_name, tool_version, _ = \
            img_filename.replace(IMG_PATH, "").split("/")
        # read the label file baked into the singularity image
        process = subprocess.Popen(
            ["singularity", "exec", img_filename,
             "cat", "/.roslin/labels.json"],
            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        stdout, _ = process.communicate()
        assert_equals(
            process.returncode, 0,
            "{} {} ({})".format(tool_name, tool_version, img_filename))
        metadata = json.loads(stdout.rstrip("\n"))
        assert_regexp_matches(metadata["version.image"], version_pattern,
                              tool_name + " " + tool_version)
def validate_string_in(self, datajson, path, regex):
    """Validate `datajson`, walk `path` into the response dict, and assert
    the value found there matches `regex`."""
    response_dict = self.dj.validate_catalog(datajson)
    pattern = re.compile(regex)
    node = response_dict.copy()
    for key in path:
        node = node[key]
    assert_regexp_matches(node, pattern)
def test_invalid_msg_from_string():
    """Constructing an IpcMessage from malformed JSON raises an exception
    whose message names the illegal format."""
    bad_json = "{\"wibble\" : \"wobble\" \"shouldnt be here\"}"
    with assert_raises(IpcMessageException) as cm:
        IpcMessage(from_str=bad_json)
    assert_regexp_matches(cm.exception.msg, "Illegal message JSON format*")
def test_simple_cloudformation_stack_during_update_default():
    """Default update-time stack policy denies Replace/Delete updates."""
    cf = _create_simple_cf()
    stack_policy = _get_stack_policy_during_update(cf, False)
    assert_regexp_matches(stack_policy, '{"Statement":')
    # parse once; the second statement carries the Deny rule
    deny_statement = json.loads(stack_policy)['Statement'][1]
    assert_equal('Deny', deny_statement['Effect'])
    assert_list_equal(["Update:Replace", "Update:Delete"],
                      deny_statement['Action'])
def test_google_search():
    """Search Google for 'Slick Test Manager' and verify a SlickQA hit."""
    global browser
    browser.go_to('http://www.google.com')
    browser.wait_for_page(Google.Home)
    query_field = Google.Home.Search_Query_Text_Field
    browser.click_and_type(query_field, "Slick Test Manager")
    browser.click(Google.Home.Search_Button)
    browser.wait_for_page(Google.SearchResults)
    assert_regexp_matches(browser.get_page_text(), '.*SlickQA:.*')
def test_prepare_path():
    """prepare_path expands '~' and creates the directory for the target."""
    import os
    import shutil
    base_path = '~/.tidml/prepared_path/'
    file_name = 'the_file'
    # start from a clean slate so prepare_path must create the directory
    shutil.rmtree(os.path.expanduser(base_path), ignore_errors=True)
    pp = prepare_path(base_path + file_name)
    # Raw string: '\.' is an invalid escape sequence in a plain literal
    # (SyntaxWarning on Python 3.12+).
    # NOTE(review): the '/Users/...' prefix assumes a macOS home directory —
    # this test will fail on Linux/CI; confirm intended.
    nt.assert_regexp_matches(pp, r'/Users/(.)+/\.tidml/prepared_path/the_file')
def test_existing_manager_already_exists(self):
    """Creating a shared buffer manager under an existing name must fail."""
    with assert_raises(SharedBufferManagerException) as cm:
        # Attempt to create a shared buffer manager that already exists
        SharedBufferManager(shared_mem_name, 100, 100, True,
                            boost_mmap_mode=boost_mmap_mode)
    assert_regexp_matches(
        cm.exception.msg,
        "Shared memory with the specified name already exists")
def test_existing_manager_absent(self):
    """Mapping a shared buffer manager that was never created must fail."""
    absent_manager_name = "AbsentBufferManager"
    with assert_raises(SharedBufferManagerException) as cm:
        # Attempt to map a shared buffer manager that doesn't already exist
        SharedBufferManager(absent_manager_name,
                            boost_mmap_mode=boost_mmap_mode)
    assert_regexp_matches(
        cm.exception.msg,
        "No shared memory exists with the specified name")
def test_404_error_is_raise_when_vm_dont_exist(self):
    """ Test that get raises an 404 error if the VM does not exist """
    self.server.set_xml_response("vms/123", 404, "")
    with assert_raises(ovirtsdk4.NotFoundError) as context:
        vm_service = self.vms_service.vm_service('123')
        vm_service.get()
    # the 404 status should be mentioned in the error text
    assert_regexp_matches(str(context.exception), "404")
def test_get_metrics(awsclient, vendored_folder, temp_lambda, capsys):
    """get_metrics prints zeroed CloudWatch counters for a fresh lambda
    (Invocations may be 0 or 1)."""
    log.info('running test_get_metrics')
    get_metrics(awsclient, temp_lambda[0])
    out, err = capsys.readouterr()
    # Raw string for the regex, and '[01]' instead of '[0,1]{1}': the
    # original character class accidentally also matched a literal comma.
    assert_regexp_matches(
        out.strip(),
        r'Duration 0\n\tErrors 0\n\tInvocations [01]\n\tThrottles 0')
def test_whitespace_words(out):
    '''should handle spaces appropriately'''
    yvs.main('111/psa.23')
    copied = out.getvalue()
    expectations = (
        ('adipiscing elit.', 'should respect content consisting of spaces'),
        ('consectetur adipiscing', 'should collapse consecutive spaces'),
    )
    for pattern, message in expectations:
        assert_regexp_matches(copied, pattern, message)
def test_invalid_msg_from_string():
    """IpcMessage built from malformed JSON raises IpcMessageException."""
    malformed = "{\"wibble\" : \"wobble\" \"shouldnt be here\"}"
    with assert_raises(IpcMessageException) as cm:
        IpcMessage(from_str=malformed)
    ex = cm.exception
    assert_regexp_matches(ex.msg, "Illegal message JSON format*")
def test_merge_request_list_view(self):
    """Merge request list shows the request link and relative timestamps."""
    r, mr_num = self._request_merge()
    r = self.app.get('/p/test/src-git/merge-requests/')
    assert 'href="%s/"' % mr_num in r, r
    # the last two spans hold the 'created'/'updated' relative times
    spans = r.html.findAll('span')
    for span in (spans[-2], spans[-1]):
        assert_regexp_matches(span.getText(), r'[0-9]+ seconds? ago')
def test_not_loadable(self):
    """A solver whose system class cannot be imported still loads."""
    fixture_dir = os.path.join(os.path.dirname(__file__), 'fixtures')
    exp_path = os.path.join(fixture_dir, 'not_loadable.h5')
    s = load_solver(exp_path, 'main')
    len(s)  # ensure we can load the events
    # check that solver_info is set:
    info = s.store['solver_info']
    nt.assert_regexp_matches(info['system_class'], 'NoSystem')
def assert_eq_dic(self, dict1, dict2):
    """Recursively assert that values in dict1 match the patterns in dict2.

    Nested dicts recurse, lists delegate to assert_eq_dic_lst, and scalars
    are compared as regexp matches on their string forms.
    """
    for key in dict2.keys():
        expected = dict2[key]
        if isinstance(expected, dict):
            # nested mapping: recurse
            self.assert_eq_dic(dict1[key], expected)
        elif isinstance(expected, list):
            self.assert_eq_dic_lst(dict1[key], expected)
        else:
            assert_regexp_matches(
                str(dict1[key]), str(expected),
                msg="Error in '{k}': '{v1}' not matched '{v2}'".format(
                    k=key, v1=str(dict1[key]), v2=str(expected)))
def test_super_repr():
    """Pretty-printing super objects shows the class and bound instance."""
    # unbound: "<super: module_name.SA, None>"
    unbound = pretty.pretty(super(SA))
    nt.assert_regexp_matches(unbound, r"<super: \S+.SA, None>")
    # bound: "<super: module_name.SA, <module_name.SB at 0x...>>"
    sb = SB()
    bound = pretty.pretty(super(SA, sb))
    nt.assert_regexp_matches(bound, r"<super: \S+.SA,\s+<\S+.SB at 0x\S+>>")
def find_grade_report_csv_link(step):  # pylint: disable=unused-argument
    """Assert the report downloads table lists a grade-report CSV."""
    # Need to reload the page to see the grades download table
    reload_the_page(step)
    world.wait_for_visible('#report-downloads-table')
    # Find table and assert a .csv file is present. Raw string: '\d' is an
    # invalid escape sequence in a plain literal (SyntaxWarning on 3.12+).
    expected_file_regexp = \
        r'edx_999_Test_Course_grade_report_\d{4}-\d{2}-\d{2}-\d{4}\.csv'
    assert_regexp_matches(world.css_html('#report-downloads-table'),
                          expected_file_regexp,
                          msg="Expected grade report filename was not found.")
def verify_report_is_generated(report_name_substring):
    """Assert the report downloads table lists the generated report CSV."""
    # Need to reload the page to see the reports table updated.
    # NOTE(review): `step` is not a parameter here — presumably a module
    # global from the step framework; confirm against the caller.
    reload_the_page(step)
    world.wait_for_visible("#report-downloads-table")
    # Find table and assert a .csv file is present. Raw string for the date
    # suffix: '\d' is an invalid escape in a plain literal (SyntaxWarning
    # on Python 3.12+).
    quoted_id = http.urlquote(world.course_key).replace("/", "_")
    expected_file_regexp = (
        quoted_id + "_" + report_name_substring +
        r"_\d{4}-\d{2}-\d{2}-\d{4}\.csv")
    assert_regexp_matches(
        world.css_html("#report-downloads-table"),
        expected_file_regexp,
        msg="Expected report filename was not found."
    )
def find_grade_report_csv_link(step):  # pylint: disable=unused-argument
    """Assert the report downloads table lists a grade-report CSV."""
    # Need to reload the page to see the grades download table
    reload_the_page(step)
    world.wait_for_visible('#report-downloads-table')
    # Find table and assert a .csv file is present. Raw string: '\d' is an
    # invalid escape sequence in a plain literal (SyntaxWarning on 3.12+).
    expected_file_regexp = \
        r'edx_999_Test_Course_grade_report_\d{4}-\d{2}-\d{2}-\d{4}\.csv'
    assert_regexp_matches(
        world.css_html('#report-downloads-table'),
        expected_file_regexp,
        msg="Expected grade report filename was not found."
    )
def find_grade_report_csv_link(step):  # pylint: disable=unused-argument
    """Assert the report downloads table lists this course's grade CSV."""
    # Need to reload the page to see the grades download table
    reload_the_page(step)
    world.wait_for_visible('#report-downloads-table')
    # Find table and assert a .csv file is present. Raw string for the date
    # suffix: '\d' is an invalid escape in a plain literal (SyntaxWarning
    # on Python 3.12+).
    quoted_id = http.urlquote(world.course_key).replace('/', '_')
    expected_file_regexp = quoted_id + r'_grade_report_\d{4}-\d{2}-\d{2}-\d{4}\.csv'
    assert_regexp_matches(
        world.css_html('#report-downloads-table'),
        expected_file_regexp,
        msg="Expected grade report filename was not found."
    )
def test_main():
    """main function should produce some output"""
    out = io.StringIO()
    with contextlib.redirect_stdout(out):
        sim.main()
    main_output = out.getvalue()
    # spot-check table headers, binary fields and cache contents
    expected_patterns = (r'\bWordAddr\b', r'\b0110\b', r'\bCache',
                         r'\b01\b', r'\b8\s*6\b')
    for pattern in expected_patterns:
        nose.assert_regexp_matches(main_output, pattern)
def test_user_can_login(self, username, password):
    """A user with valid credentials lands on the calendar page."""
    home_page = HomePage(self.browser)
    home_page.navigate_to()
    home_page.get_login_form().submit_form_with(username, password)
    assert_regexp_matches(self.browser.current_url, ".*/calendar",
                          "Page did not load")
    # log back out so later tests start from a clean session
    HomePage(self.browser).get_nav_links().logout()
def test_list_multiple(self):
    """List multiple contacts on different frequencies."""
    # one logger command per line, ending with 'list'
    output = self.run_commands("""7000 n0fn 599
14000 CW n0fn 339
list
""")
    assert "Time" in output
    # both contacts appear with their frequency, mode and report
    assert_regexp_matches(output, r'7000\s+PH\s+n0fn\s+599')
    assert_regexp_matches(output, r'14000\s+CW\s+n0fn\s+339')
def test_display_addr_refs_no_index():
    """should display n/a for index when there are no index bits"""
    refs = sim.get_addr_refs(
        word_addrs=WORD_ADDRS, num_addr_bits=8,
        num_tag_bits=7, num_index_bits=0, num_offset_bits=1)
    ref_statuses = ['miss', 'miss', 'miss', 'miss']
    out = io.StringIO()
    with contextlib.redirect_stdout(out):
        sim.display_addr_refs(refs, ref_statuses, table_width=TABLE_WIDTH)
    table_output = out.getvalue()
    # Raw string for '\d': invalid escape sequence in a plain literal
    # (SyntaxWarning on Python 3.12+).
    nose.assert_regexp_matches(
        table_output, r'\s*{}\s*{}\s*{}'.format('n/a', r'\d', 'miss'))
def test_display_addr_refs_no_offset():
    """should display n/a for offset when there are no offset bits"""
    refs = sim.get_addr_refs(
        word_addrs=WORD_ADDRS, num_addr_bits=8,
        num_tag_bits=4, num_index_bits=4, num_offset_bits=0)
    ref_statuses = ['miss'] * 12
    out = io.StringIO()
    with contextlib.redirect_stdout(out):
        sim.display_addr_refs(refs, ref_statuses)
    table_output = out.getvalue()
    # Raw string for '\d\d': invalid escape sequence in a plain literal
    # (SyntaxWarning on Python 3.12+).
    nose.assert_regexp_matches(
        table_output, r'\s*{}\s*{}\s*{}'.format(r'\d\d', 'n/a', 'miss'))
def check_package(data):
    """Validate the top-level keys and types of one package entry."""
    url_keys = ['details', 'homepage', 'readme', 'issues', 'donate', 'buy']
    for key in data.keys():
        assert_in(key, ['name', 'details', 'releases', 'homepage', 'author',
                        'readme', 'issues', 'donate', 'buy', 'previous_names',
                        'labels'])
        assert_equal(type(data[key]), map_key_type(key))
        if key in url_keys:
            assert_regexp_matches(data[key], '^https?://')
    if 'details' not in data:
        # without a details URL these fields become mandatory
        for field in ('name', 'homepage', 'author', 'releases'):
            assert_in(field, data,
                      'The key "%s" is required if no "details" URL provided'
                      % field)
def test_external_refs(self):
    """If a bundle contains absolute paths outside of the media directory,
    to generate a url they are copied in.
    """
    # assert_regexp_matches only exists on Python 2.7+ nose
    try:
        from nose.tools import assert_regexp_matches
    except ImportError:
        raise SkipTest("Assertion method only present in 2.7+")
    self.env.debug = True
    with TempDirHelper() as helper:
        helper.create_files(['foo.css'])
        bundle = self.mkbundle(helper.path('foo.css'))
        urls = bundle.urls()
        assert len(urls) == 1
        # the external file is served from a webassets-external copy
        assert_regexp_matches(urls[0], r'.*/webassets-external/\d*_foo.css')
def test_tab_about(self):
    """About page identifies a Turris 1.x device with a serial in range."""
    about_page = self.app.get("/config/about/")
    assert_equal(about_page.status_int, 200)
    assert_regexp_matches(about_page.body, r"<td>Turris - RTRS0[12]</td>",
                          "This test suite is not adjusted for this device.")
    # the serial number is rendered as a bare integer in a table cell
    serial_match = re.search(r"<td>(\d+)</td>", about_page.body)
    assert_true(serial_match)
    try:
        serial = int(serial_match.group(1))
    except ValueError:
        raise AssertionError("Router serial number is not integer.")
    # should work on routers from first production Turris 1.0 till new Turris 1.1
    assert_greater(serial, 0x499999999)
    assert_less(serial, 0x900F00000)
def _assert_exception(fun, *args, **kwargs):
    """Call `fun` and assert that it raises `kwargs['expected']`.

    :param fun: callable under test; invoked as fun(None) when no
        positional args are given, otherwise fun(*args).
    :param matching: optional regexp the exception text must match.
    :param expected: the exception type that must be raised.
    """
    matching = kwargs.pop('matching', None)
    expected = kwargs['expected']
    try:
        if len(args) == 0:
            fun(None)
        else:
            fun(*args)
    except expected as e:
        if matching is not None:
            assert_regexp_matches(str(e), matching)
    except Exception:
        # Bare `raise` preserves the original traceback; the original code
        # used `raise e`, which re-raised from here and obscured the
        # actual failure site.
        raise
    else:
        assert False, "Expecting query to raise an exception, but nothing was raised."
def test_describe_customer(self, pager):
    """describe('customer') prints columns, the primary key marker and the
    foreign-key section."""
    self.setup_mock_describe_db(pager)
    self.ip.describe('customer')
    # decode once; every assertion below works on the same text
    output = self.pagerio.getvalue().decode('utf8')
    # Raw strings throughout: '\*', '\s', '\-', '\(' are invalid escape
    # sequences in plain literals (SyntaxWarning on Python 3.12+).
    nt.assert_regexp_matches(output, r'\*id.*INTEGER.*NOT NULL')
    nt.assert_regexp_matches(output, r'name.*INTEGER.*NOT NULL')
    nt.assert_regexp_matches(output, r'company_id\s+\|\s+INTEGER\s+\|\s+NULL')
    pkre = re.compile(r'Primary Key \(\*\)\n\-+\s+id', re.M | re.I)
    nt.assert_regexp_matches(output, pkre)
    fkre = re.compile(
        r'Foreign Keys\n\-+\s+'
        r'customer\(company_id\) references company\(id\)', re.M | re.I)
    nt.assert_regexp_matches(output, fkre)
def test_handle_two_addrs_slash_notation_communicate_no(print_addr_details):
    """Should print info for two IP addresses in slash notation."""
    out = io.StringIO()
    with contextlib.redirect_stdout(out):
        cidrbrewer.handle_two_addrs('125.47.32.170/25', '125.47.32.53/25')
    output = out.getvalue()
    # both addresses echoed with their dotted-binary expansions
    addrs_pattern = r'{}\n\s+{}\s+{}\n\s+{}\s+{}'.format(
        'Given IP addresses:',
        r'125\.47\.32\.170/25',
        r'01111101\.00101111\.00100000\.10101010',
        r'125\.47\.32\.53/25',
        r'01111101\.00101111\.00100000\.00110101')
    nose.assert_regexp_matches(output, addrs_pattern,
                               'Given IP addresses not printed')
    communicate_pattern = r'{}\n\s+{}'.format(
        r'Can these IP addresses communicate\?', 'No')
    nose.assert_regexp_matches(output, communicate_pattern,
                               '"Can communicate" message not printed')
    print_addr_details.assert_called_once_with(
        '01111101001011110010000010101010', 24)