def test_assert_cwd_unchanged_not_masking_exceptions():
    # Test that we are not masking out other "more important" exceptions
    orig_cwd = os.getcwd()

    @assert_cwd_unchanged
    def do_chdir_value_error():
        os.chdir(os.pardir)
        raise ValueError("error exception")

    with swallow_logs() as cml:
        with assert_raises(ValueError) as cm:
            do_chdir_value_error()
        # retrospect exception
        if PY2:
            # could not figure out how to make it legit for PY3
            # but on manual try -- works, and exception traceback is not masked out
            exc_info = sys.exc_info()
            assert_in('raise ValueError("error exception")',
                      traceback.format_exception(*exc_info)[-2])
        eq_(orig_cwd, os.getcwd(),
            "assert_cwd_unchanged didn't return us back to %s" % orig_cwd)
        assert_in("Mitigating and changing back", cml.out)

    # and again but allowing to chdir
    @assert_cwd_unchanged(ok_to_chdir=True)
    def do_chdir_value_error():
        os.chdir(os.pardir)
        raise ValueError("error exception")

    with swallow_logs() as cml:
        assert_raises(ValueError, do_chdir_value_error)
        eq_(orig_cwd, os.getcwd(),
            "assert_cwd_unchanged didn't return us back to %s" % orig_cwd)
        assert_not_in("Mitigating and changing back", cml.out)
def _req(cls, method, *args, **kwargs):
    use_token = kwargs.pop('use_token', True)
    token = kwargs.pop('token', None)
    if use_token and token is None:
        admin = kwargs.pop('admin', False)
        if admin:
            if cls._admin_token is None:
                cls._admin_token = get_auth_token(ADMIN_USERNAME, ADMIN_PASSWORD)
            token = cls._admin_token
        else:
            if cls._token is None:
                cls._token = get_auth_token(USERNAME, PASSWORD)
            token = cls._token
    if use_token:
        headers = kwargs.get('headers', {})
        headers.setdefault('Authorization', 'Token ' + token)
        kwargs['headers'] = headers
    expected = kwargs.pop('expected', 200)
    resp = requests.request(method, *args, **kwargs)
    if expected is not None:
        if hasattr(expected, '__iter__'):
            assert_in(resp.status_code, expected,
                      "Expected http status in %s, received %s" % (expected, resp.status_code))
        else:
            assert_equal(resp.status_code, expected,
                         "Expected http status %s, received %s" % (expected, resp.status_code))
    return resp
def test_13_dataset_fail_iso19139_schema_2(self):
    # This test Dataset has srv tags, which only Service metadata should have.
    errors = self.get_validation_errors(
        validation.ISO19139EdenSchema,
        'gemini2.1/validation/13_Dataset_Invalid_Element_srv.xml')
    assert len(errors) > 0
    assert_in('(gmx.xsd)', errors)
    assert_in('Element \'{http://www.isotc211.org/2005/srv}SV_ServiceIdentification\': This element is not expected.', errors)
def find_student_profile_table(step):  # pylint: disable=unused-argument
    # Find the student profile table display
    world.wait_for_visible('#data-student-profiles-table')

    # Wait for the data table to be populated
    world.wait_for(lambda _: world.css_text('#data-student-profiles-table') not in [u'', u'Loading'])

    if world.role == 'instructor':
        expected_data = [
            world.instructor.username,
            world.instructor.email,
            world.instructor.profile.name,
            world.instructor.profile.gender,
            world.instructor.profile.goals,
        ]
    elif world.role == 'staff':
        expected_data = [
            world.staff.username,
            world.staff.email,
            world.staff.profile.name,
            world.staff.profile.gender,
            world.staff.profile.goals,
        ]
    for datum in expected_data:
        assert_in(datum, world.css_text('#data-student-profiles-table'))
def test_iso19139_failure(self):
    errors = self.get_validation_errors(
        validation.ISO19139Schema,
        'iso19139/dataset-invalid.xml')
    assert len(errors) > 0
    assert_in('Dataset schema (gmx.xsd)', errors)
    assert_in('{http://www.isotc211.org/2005/gmd}nosuchelement\': This element is not expected.', errors)
def test_many_pages():
    """Test that 100 items paginate to seven pages at 15 items per page."""
    items = range(100)
    page = paginate.Page(items, page=0, items_per_page=15)
    url = "http://example.org/foo/page=$page"
    # eq_(page.collection_type, range)  <-- py3 only
    assert_in(page.collection_type, (range, list))
    eq_(page.page, 1)
    eq_(page.first_item, 1)
    eq_(page.last_item, 15)
    eq_(page.first_page, 1)
    eq_(page.last_page, 7)
    eq_(page.previous_page, None)
    eq_(page.next_page, 2)
    eq_(page.items_per_page, 15)
    eq_(page.item_count, 100)
    eq_(page.page_count, 7)
    eq_(
        page.pager(url=url),
        '1 <a href="http://example.org/foo/page=2">2</a> <a href="http://example.org/foo/page=3">3</a> .. <a href="http://example.org/foo/page=7">7</a>',
    )
    eq_(
        page.pager(url=url, separator="_"),
        '1_<a href="http://example.org/foo/page=2">2</a>_<a href="http://example.org/foo/page=3">3</a>_.._<a href="http://example.org/foo/page=7">7</a>',
    )
    eq_(
        page.pager(
            url=url,
            link_attr={"style": "linkstyle"},
            curpage_attr={"style": "curpagestyle"},
            dotdot_attr={"style": "dotdotstyle"},
        ),
        '<span style="curpagestyle">1</span> <a href="http://example.org/foo/page=2" style="linkstyle">2</a> <a href="http://example.org/foo/page=3" style="linkstyle">3</a> <span style="dotdotstyle">..</span> <a href="http://example.org/foo/page=7" style="linkstyle">7</a>',
    )
def test_mail_user(self):
    user = factories.User()
    user_obj = model.User.by_name(user['name'])

    msgs = self.get_smtp_messages()
    assert_equal(msgs, [])

    # send email
    test_email = {'recipient': user_obj,
                  'subject': 'Meeting',
                  'body': 'The meeting is cancelled.',
                  'headers': {'header1': 'value1'}}
    mailer.mail_user(**test_email)

    # check it went to the mock smtp server
    msgs = self.get_smtp_messages()
    assert_equal(len(msgs), 1)
    msg = msgs[0]
    assert_equal(msg[1], config['smtp.mail_from'])
    assert_equal(msg[2], [user['email']])
    assert test_email['headers'].keys()[0] in msg[3], msg[3]
    assert test_email['headers'].values()[0] in msg[3], msg[3]
    assert test_email['subject'] in msg[3], msg[3]
    expected_body = self.mime_encode(test_email['body'], user['name'])
    assert_in(expected_body, msg[3])
def test_DropoutExplicit(self):
    nn = MLPR(layers=[L("Tanh", units=8), L("Linear",)], regularize='dropout', n_iter=1)
    assert_equal(nn.regularize, 'dropout')
    self._run(nn)
    assert_in('Using `dropout` for regularization.', self.output.getvalue())
def test_DropoutPerLayer(self):
    nn = MLPR(layers=[L("Rectifier", units=8, dropout=0.25), L("Linear")], regularize='dropout', n_iter=1)
    assert_equal(nn.regularize, 'dropout')
    self._run(nn)
    assert_in('Using `dropout` for regularization.', self.output.getvalue())
def test_RegularizeExplicitL2(self):
    nn = MLPR(layers=[L("Sigmoid", units=8), L("Softmax",)], regularize='L2', n_iter=1)
    assert_equal(nn.regularize, 'L2')
    self._run(nn)
    assert_in('Using `L2` for regularization.', self.output.getvalue())
def test_RegularizeCustomParam(self):
    nn = MLPR(layers=[L("Tanh", units=8), L("Linear",)], weight_decay=0.01, n_iter=1)
    assert_equal(nn.weight_decay, 0.01)
    self._run(nn)
    assert_in('Using `L2` for regularization.', self.output.getvalue())
def see_a_multi_step_component(step, category):
    # Wait for all components to finish rendering
    selector = 'li.studio-xblock-wrapper div.xblock-student_view'
    world.wait_for(lambda _: len(world.css_find(selector)) == len(step.hashes))

    for idx, step_hash in enumerate(step.hashes):
        if category == 'HTML':
            html_matcher = {
                'Text': '\n \n',
                'Announcement': '<p> Words of encouragement! This is a short note that most students will read. </p>',
                'Zooming Image': '<h2>ZOOMING DIAGRAMS</h2>',
                'E-text Written in LaTeX': '<h2>Example: E-text page</h2>',
                'Raw HTML': '<p>This template is similar to the Text template. The only difference is',
            }
            actual_html = world.css_html(selector, index=idx)
            assert_in(html_matcher[step_hash['Component']], actual_html)
        else:
            actual_text = world.css_text(selector, index=idx)
            assert_in(step_hash['Component'].upper(), actual_text)
def test_RegularizeExplicitL1(self):
    nn = MLPR(layers=[L("Tanh", units=8), L("Linear",)], regularize='L1', n_iter=1)
    assert_equal(nn.regularize, 'L1')
    self._run(nn)
    assert_in('Using `L1` for regularization.', self.output.getvalue())
def test_testprofile_group_manager_is_added():
    """profile.TestProfile.group_manager: Tests are added to the profile"""
    prof = profile.TestProfile()
    with prof.group_manager(utils.Test, 'foo') as g:
        g(['a', 'b'], 'a')
    nt.assert_in(grouptools.join('foo', 'a'), prof.test_list)
def see_a_single_step_component(step):
    for step_hash in step.hashes:
        component = step_hash['Component']
        assert_in(component, ['Discussion', 'Video'])
        component_css = 'div.xmodule_{}Module'.format(component)
        assert_true(world.is_css_present(component_css),
                    "{} couldn't be found".format(component))
def test_get_article_by_search_wap(self):
    file_name = os.path.join(fake_data_path, 'search-gaokao-article.json')
    with io.open(file_name, encoding='utf-8') as f:
        wap_json = json.load(f)
    gzh_articles = WechatSogouStructuring.get_article_by_search_wap(gaokao_keyword, wap_json)
    assert_equal(10, len(gzh_articles))

    titles = []
    abstracts = []
    gzh_names = []
    isvs = []
    open_ids = []
    for i in gzh_articles:
        assert_in('gzh', i)
        assert_in('article', i)

        article = i['article']
        titles.append(article['title'])
        abstracts.append(article['abstract'])
        assert_in('mp.weixin.qq.com/', article['url'])

        gzh = i['gzh']
        assert_in('mp.weixin.qq.com/profile?src=3&timestamp', gzh['profile_url'])
        assert_in('wx.qlogo.cn/mmhead', gzh['headimage'])
        gzh_names.append(gzh['wechat_name'])
        isvs.append(gzh['isv'])
        open_ids.append(gzh['open_id'])

    assert_equal(['高考有多重要,为什么要重视高考?丨微观点',
                  '高考:穷人考不好,中产考状元,精英不高考',
                  '17个高考落榜者的“逆袭”故事:高考失败,天不会塌',
                  '刚刚,高考“满分”诞生了!(附各省高考分数线)',
                  '高考2017 | 全国各省区市高考录取时间大汇总,最新最全!',
                  '28省公布高考分数线!各省高考状元出炉!',
                  '高考2017 | 教育部发布高招录取工作通知!六大事项看过来',
                  '高考录取过程详解',
                  '高考前互有好感,高考后开始拍拖,还一同被清华录取!学霸早恋...',
                  '高考复读,你怕了吗?'], titles)
    assert_equal(['针对这个问题,其实占豪已经谈过,但还是想借高考之后、借这位小战友的留言,结合自己的人生经验,谈谈个人对这件事的看法.在占豪看来,现实的社会是分层的,一个一个阶...',
                  '#条条大路通罗马,有人就出生在罗马#前几天北京文科高考状元熊轩昂接受澎湃新闻的采访的时候,说了下面这段话. “农村地区的孩子越来越难考上好学校,而像我这种父母都...',
                  '从高考分数出来的那一刻,今年的考生们大概都会大胆猜想自己未来的命运:高分者,一脚踏进名牌高校工作不愁,似乎人生已经平步青云;落榜者,面对落魄的分数整日哀叹,或...',
                  '高考会有满分的情况吗?还真有!6月22日开始,全国各省的高考成绩陆续发布.22日晚上,成都市青白江区一个小区内人声鼎沸,因为小区里有一位今年参加高考的学生,总分...',
                  '2017年高考录取工作开始了,各省区市高考录取工作何时进行?为了方便考生和家长及时了解,小编为大家作了最新最全的梳理.(图片可点击放大查看) 北京7月6日,飞行专业...',
                  '随着阅卷工作的结束,各地开始陆续公布2017年高考录取分数线.目前,已有28个省份公布了高考分数线.青海、新疆、西藏尚未公布.据媒体报道,青海将于6月30日前发布成绩...',
                  '有关省级教育行政部门、招生考试机构要精心实施减少录取批次改革,完善平行志愿投档录取办法,努力提高考生志愿满足率.上海、浙江要精心组织新高考录取工作,细化完善工...',
                  '在高考录取过程中,我省和全国各地一样都实行计算机远程网上录取的方式.录取中坚持“学校负责、招办监督”的原则,整个录取过程严格按照录取日程安排,分批次进行录取....',
                  '但学霸们在这个问题上有自己的选择,今年佛山有一对高分学霸,两人虽早有好感,但均理性选择高考后才开始拍拖,两人一同考上清华,在班上传为佳话.然而,有家长担心孩子...',
                  '我家孩子高考失利了,只考了326分,刚到本科线,本科没希望了,哎!我家闺女也是文科370分,真愁人,该怎么办呢?让孩子走专科,孩子不甘心,做家长的也不甘心,复习,...'],
                 abstracts)
    assert_equal(['占豪', '才华有限青年', '新闻哥', '光明网', '微言教育', '中国经济网', '阳光高考信息平台', '甘肃教育', '广州日报', '河北高考'], gzh_names)
    assert_equal(['0', '1', '1', '1', '1', '1', '1', '1', '1', '0'], isvs)
    assert_equal(['oIWsFt8nKJlpLQbQ5H9NMPBjxup8', 'oIWsFt24BFRU0oh5C8cGFo7vAwYk', 'oIWsFt7B8jj2BkEA1WsGkPU40uhU',
                  'oIWsFtwaY2ERrY_oAgz5pHTn4aGc', 'oIWsFt5d7GugmQYi0cNC60qYV9c4', 'oIWsFt0B7LsVbUCMpgksNY8tqIno',
                  'oIWsFtzrEz_Tydpahalp9daXMg0Y', 'oIWsFt5kk9RnueF3AiUOao2XrP9o', 'oIWsFt7aLTQfT_wmrF4GpT27_xjg',
                  'oIWsFt3nYBUhqb4beN3rTBxdUHD8'], open_ids)
def test_testprofile_group_manager_no_name_args_gt_one():
    """profile.TestProfile.group_manager: no name and len(args) > 1 is valid"""
    prof = profile.TestProfile()
    with prof.group_manager(utils.Test, 'foo') as g:
        g(['a', 'b'])
    nt.assert_in(grouptools.join('foo', 'a b'), prof.test_list)
def test_line_magics():
    ip = get_ipython()
    c = ip.Completer
    s, matches = c.complete(None, 'lsmag')
    nt.assert_in('%lsmagic', matches)
    s, matches = c.complete(None, '%lsmag')
    nt.assert_in('%lsmagic', matches)
def test_email(self):
    self._subscribe()  # as current user: test-admin
    user2 = M.User.query.get(username='******')
    self._subscribe(user=user2)
    self._post_notification()
    ThreadLocalORMSession.flush_all()

    assert_equal(M.Notification.query.get()['from_address'], '"Test Admin" <*****@*****.**>')
    assert_equal(M.Mailbox.query.find().count(), 2)

    # sends the notification out into "mailboxes", and from mailboxes into email tasks
    M.MonQTask.run_ready()
    mboxes = M.Mailbox.query.find().all()
    assert_equal(len(mboxes), 2)
    assert_equal(len(mboxes[0].queue), 1)
    assert not mboxes[0].queue_empty
    assert_equal(len(mboxes[1].queue), 1)
    assert not mboxes[1].queue_empty

    email_tasks = M.MonQTask.query.find({'state': 'ready'}).all()
    assert_equal(len(email_tasks), 2)  # make sure both subscribers will get an email

    first_destinations = [e.kwargs['destinations'][0] for e in email_tasks]
    assert_in(str(c.user._id), first_destinations)
    assert_in(str(user2._id), first_destinations)
    assert_equal(email_tasks[0].kwargs['fromaddr'], '"Test Admin" <*****@*****.**>')
    assert_equal(email_tasks[1].kwargs['fromaddr'], '"Test Admin" <*****@*****.**>')
    assert_equal(email_tasks[0].kwargs['sender'], '*****@*****.**')
    assert_equal(email_tasks[1].kwargs['sender'], '*****@*****.**')
    assert email_tasks[0].kwargs['text'].startswith('Home modified by Test Admin')
    assert 'you indicated interest in ' in email_tasks[0].kwargs['text']
def test_edit_issue(self):
    # goto issue show page
    env = {'REMOTE_USER': self.owner['name'].encode('ascii')}
    response = self.app.get(
        url=toolkit.url_for('issues_show',
                            dataset_id=self.dataset['id'],
                            issue_number=self.issue['number']),
        extra_environ=env,
    )
    # click the edit link
    response = response.click(linkid='issue-edit-link', extra_environ=env)
    # fill in the form
    form = response.forms['issue-edit']
    form['title'] = 'edited title'
    form['description'] = 'edited description'
    # save the form
    response = helpers.webtest_submit(form, 'save', extra_environ=env)
    response = response.follow()
    # make sure it all worked
    assert_in('edited title', response)
    assert_in('edited description', response)
    result = helpers.call_action('issue_show',
                                 dataset_id=self.dataset['id'],
                                 issue_number=self.issue['number'])
    assert_equals(u'edited title', result['title'])
    assert_equals(u'edited description', result['description'])
def check_inheritable_attribute(self, attribute, value):
    # `attribute` isn't a basic attribute of Sequence
    assert_false(hasattr(SequenceDescriptor, attribute))

    # `attribute` is added by InheritanceMixin
    assert_true(hasattr(InheritanceMixin, attribute))

    root = SequenceFactory.build(policy={attribute: str(value)})
    ProblemFactory.build(parent=root)

    # InheritanceMixin will be used when processing the XML
    assert_in(InheritanceMixin, root.xblock_mixins)

    seq = self.process_xml(root)
    assert_equals(seq.unmixed_class, SequenceDescriptor)
    assert_not_equals(type(seq), SequenceDescriptor)

    # `attribute` is added to the constructed sequence, because
    # it's in the InheritanceMixin
    assert_equals(value, getattr(seq, attribute))

    # `attribute` is a known attribute, so we shouldn't include it
    # in xml_attributes
    assert_not_in(attribute, seq.xml_attributes)
def _assert_document_links(self, document):
    service = self.service['services']
    # Hardcoded stuff comes from 'get_documents' in service_presenters.py
    url_keys = [
        'pricingDocumentURL',
        'sfiaRateDocumentURL',
        'serviceDefinitionDocumentURL',
        'termsAndConditionsDocumentURL',
    ]
    doc_hrefs = [a.get('href') for a in document.xpath(
        '//div[@id="meta"]//li[@class="document-list-item"]/a')]
    for url_key in url_keys:
        if url_key in self.service['services']:
            assert_in(
                # Replace all runs of whitespace with a '%20'
                self._replace_whitespace(service[url_key], '%20'),
                doc_hrefs
            )
    if 'additionalDocumentURLs' in service:
        for document_url in service['additionalDocumentURLs']:
            assert_in(
                self._replace_whitespace(document_url, '%20'),
                doc_hrefs
            )
def test_super_repr():
    output = pretty.pretty(super(SA))
    nt.assert_in("SA", output)

    sb = SB()
    output = pretty.pretty(super(SA, sb))
    nt.assert_in("SA", output)
def test_json_reports(self):
    """Test that json_reports.js works"""
    if 'CASPERJS_EXECUTABLE' in os.environ:
        casperjs_executable = os.environ['CASPERJS_EXECUTABLE']
    else:
        casperjs_executable = 'casperjs'
    try:
        process = Popen(
            [
                casperjs_executable, 'test', '--json', '--test-self',
                os.path.join(
                    os.path.dirname(__file__),
                    '../../casper_tests/json_report.js'
                )
            ],
            stdout=PIPE,
            stderr=PIPE
        )
    except OSError:
        # casperjs is not installed; skip the test silently
        return
    stdout_data, stderr_data = process.communicate()
    assert_in(
        '#JSON{"successes":["test json_report.js: a success"],"failures":["test json_report.js: a failure"]}',
        stdout_data.split("\n")
    )
def test_get_courses_for_wiki(self):
    """
    Test the get_courses_for_wiki method
    """
    for course_number in self.courses:
        course_locations = self.draft_store.get_courses_for_wiki(course_number)
        assert_equals(len(course_locations), 1)
        assert_equals(Location('edX', course_number, '2012_Fall', 'course', '2012_Fall'), course_locations[0])

    course_locations = self.draft_store.get_courses_for_wiki('no_such_wiki')
    assert_equals(len(course_locations), 0)

    # set toy course to share the wiki with simple course
    toy_course = self.draft_store.get_course(SlashSeparatedCourseKey('edX', 'toy', '2012_Fall'))
    toy_course.wiki_slug = 'simple'
    self.draft_store.update_item(toy_course)

    # now toy_course should not be retrievable with old wiki_slug
    course_locations = self.draft_store.get_courses_for_wiki('toy')
    assert_equals(len(course_locations), 0)

    # but there should be two courses with wiki_slug 'simple'
    course_locations = self.draft_store.get_courses_for_wiki('simple')
    assert_equals(len(course_locations), 2)
    for course_number in ['toy', 'simple']:
        assert_in(Location('edX', course_number, '2012_Fall', 'course', '2012_Fall'), course_locations)

    # configure simple course to use unique wiki_slug.
    simple_course = self.draft_store.get_course(SlashSeparatedCourseKey('edX', 'simple', '2012_Fall'))
    simple_course.wiki_slug = 'edX.simple.2012_Fall'
    self.draft_store.update_item(simple_course)

    # it should be retrievable with its new wiki_slug
    course_locations = self.draft_store.get_courses_for_wiki('edX.simple.2012_Fall')
    assert_equals(len(course_locations), 1)
    assert_in(Location('edX', 'simple', '2012_Fall', 'course', '2012_Fall'), course_locations)
def test_rows():
    row_keys = ['rows-row1', 'rows-row2', 'rows-row3']
    data_old = {'cf1:col1': 'v1old', 'cf1:col2': 'v2old'}
    data_new = {'cf1:col1': 'v1new', 'cf1:col2': 'v2new'}

    with assert_raises(TypeError):
        table.rows(row_keys, object())

    with assert_raises(TypeError):
        table.rows(row_keys, timestamp='invalid')

    for row_key in row_keys:
        table.put(row_key, data_old, timestamp=4000)

    for row_key in row_keys:
        table.put(row_key, data_new)

    assert_dict_equal({}, table.rows([]))

    rows = dict(table.rows(row_keys))
    for row_key in row_keys:
        assert_in(row_key, rows)
        assert_dict_equal(data_new, rows[row_key])

    rows = dict(table.rows(row_keys, timestamp=5000))
    for row_key in row_keys:
        assert_in(row_key, rows)
        assert_dict_equal(data_old, rows[row_key])
def test_get_context_data(self):
    self.view.draft = self.dr1
    res = self.view.get_context_data()
    nt.assert_is_instance(res, dict)
    nt.assert_in('draft', res)
    nt.assert_is_instance(res['draft'], dict)
    nt.assert_in('IMMEDIATE', res)
def _assert_breadcrumbs(self, document, lot):
    # check links exist back to
    # (1) digital marketplace,
    # (2) cloud tech and support,
    # (3) search page for lot

    # Hardcoded stuff found in 'views.py'
    breadcrumbs_expected = {
        '/': 'Digital Marketplace',
        '/g-cloud': 'Cloud technology and support',
        '/g-cloud/search?lot={}'.format(lot.lower()): self.lots[lot]
    }

    breadcrumbs = document.xpath('//div[@id="global-breadcrumb"]//a')
    assert_equal(3, len(breadcrumbs))

    for breadcrumb in breadcrumbs:
        breadcrumb_text = breadcrumb.text_content().strip()
        breadcrumb_href = breadcrumb.get('href').strip()
        # check that the link exists in our expected breadcrumbs
        assert_in(breadcrumb_href, breadcrumbs_expected)
        # check that the link text is the same
        assert_equal(breadcrumb_text, breadcrumbs_expected[breadcrumb_href])
def test_families():
    families = table.families()
    for name, fdesc in families.iteritems():
        assert_is_instance(name, basestring)
        assert_is_instance(fdesc, dict)
        assert_in('name', fdesc)
        assert_in('max_versions', fdesc)
def test_numpy_reset_array_undec():
    "Test '%reset array' functionality"
    _ip.ex('import numpy as np')
    _ip.ex('a = np.empty(2)')
    nt.assert_in('a', _ip.user_ns)
    _ip.magic('reset -f array')
    nt.assert_not_in('a', _ip.user_ns)
def test_date_instead_of_revision(self):
    app = self._get_test_app()
    response = app.get(
        '/api/search/revision'
        '?since_id=2010-01-01T00:00:00',
        status=404)
    assert_in('Not found - There is no revision', response.body)
def test_script_config():
    ip = get_ipython()
    ip.config.ScriptMagics.script_magics = ['whoda']
    sm = script.ScriptMagics(shell=ip)
    nt.assert_in('whoda', sm.magics['cell'])
def i_am_brought_to_course_outline(step):
    assert_in('Course Outline', world.css_text('.outline .page-header'))
    assert_equal(1, len(world.browser.windows))
def test_custom_repr():
    """A custom repr should override a pretty printer for a parent type"""
    oc = OrderedCounter("abracadabra")
    nt.assert_in("OrderedCounter(OrderedDict", pretty.pretty(oc))
    nt.assert_equal(pretty.pretty(MySet()), 'mine')
def test_bmap_map_control_panel(fake_writer):
    provinces = ["London"]
    values = [1]
    bmap = (
        BMap()
        .add_schema(baidu_ak=FAKE_API_KEY, center=[-0.118092, 51.509865])
        .add_coordinate("London", -0.118092, 51.509865)
        .add(
            "bmap",
            [list(z) for z in zip(provinces, values)],
            type_=ChartType.LINES,
            label_opts=opts.LabelOpts(formatter="{b}"),
        )
        .add_control_panel(
            copyright_control_opts=opts.BMapCopyrightTypeOpts(position=3),
            maptype_control_opts=opts.BMapTypeControlOpts(
                type_=BMapType.MAPTYPE_CONTROL_DROPDOWN),
            scale_control_opts=opts.BMapScaleControlOpts(),
            overview_map_opts=opts.BMapOverviewMapControlOpts(is_open=True),
            navigation_control_opts=opts.BMapNavigationControlOpts(),
            geo_location_control_opts=opts.BMapGeoLocationControlOpts(),
        )
    )
    bmap.render()
    content = fake_writer.call_args[0][1]
    assert_in("new BMap.CopyrightControl", content)
    assert_in("new BMap.MapTypeControl", content)
    assert_in("new BMap.ScaleControl", content)
    assert_in("new BMap.OverviewMapControl", content)
    assert_in("new BMap.NavigationControl", content)
    assert_in("new BMap.GeolocationControl", content)
def test_revision_doesnt_exist_api_v2(self):
    app = self._get_test_app()
    response = app.get('/api/2/search/revision?since_id=1234', status=404)
    assert_in('Not found - There is no revision', response.body)
def test_no_value(self):
    app = self._get_test_app()
    response = app.get('/api/search/revision?since_id=', status=400)
    assert_in('Bad request - No revision specified', response.body)
        self.replica_client.add_replicas(rse='MOCK', files=[{
            'scope': tmp_scope,
            'name': tmp_name,
            'bytes': 1L,
            'adler32': '0cc737eb'
        }])
        self.replica_client.add_replicas(rse='MOCK3', files=[{
            'scope': tmp_scope,
            'name': tmp_name,
            'bytes': 1L,
            'adler32': '0cc737eb'
        }])
        _, out, _ = execute(cmd)
        assert_in('303 See Other', out)
        assert_in('Location: https://mock', out)


class TestReplicaMetalinkRedirection:

    def __init__(self):
        self.cacert = config_get('test', 'cacert')
        self.host = config_get('client', 'rucio_host')
        self.auth_host = config_get('client', 'auth_host')
        self.marker = '$> '
        # get auth token
        self.base_client = BaseClient()
        self.token = self.base_client.headers['X-Rucio-Auth-Token']
        self.replica_client = ReplicaClient()

    def test_replica_meta_redirection(self):
def test_contentstore_attrs(self):
    """
    Test getting, setting, and defaulting the locked attr and arbitrary attrs.
    """
    location = Location('edX', 'toy', '2012_Fall', 'course', '2012_Fall')
    course_content, __ = self.content_store.get_all_content_for_course(location.course_key)
    assert_true(len(course_content) > 0)

    filter_params = _build_requested_filter('Images')
    filtered_course_content, __ = self.content_store.get_all_content_for_course(
        location.course_key, filter_params=filter_params)
    assert_true(len(filtered_course_content) < len(course_content))

    # a bit overkill, could just do for content[0]
    for content in course_content:
        assert not content.get('locked', False)
        asset_key = AssetLocation._from_deprecated_son(
            content.get('content_son', content['_id']), location.run)
        assert not self.content_store.get_attr(asset_key, 'locked', False)
        attrs = self.content_store.get_attrs(asset_key)
        assert_in('uploadDate', attrs)
        assert not attrs.get('locked', False)
        self.content_store.set_attr(asset_key, 'locked', True)
        assert self.content_store.get_attr(asset_key, 'locked', False)
        attrs = self.content_store.get_attrs(asset_key)
        assert_in('locked', attrs)
        assert attrs['locked'] is True
        self.content_store.set_attrs(asset_key, {'miscel': 99})
        assert_equals(self.content_store.get_attr(asset_key, 'miscel'), 99)

    asset_key = AssetLocation._from_deprecated_son(
        course_content[0].get('content_son', course_content[0]['_id']),
        location.run
    )
    assert_raises(
        AttributeError, self.content_store.set_attr, asset_key,
        'md5', 'ff1532598830e3feac91c2449eaa60d6'
    )
    assert_raises(
        AttributeError, self.content_store.set_attrs, asset_key,
        {'foo': 9, 'md5': 'ff1532598830e3feac91c2449eaa60d6'}
    )
    assert_raises(
        NotFoundError, self.content_store.get_attr,
        Location('bogus', 'bogus', 'bogus', 'asset', 'bogus'),
        'displayname'
    )
    assert_raises(
        NotFoundError, self.content_store.set_attr,
        Location('bogus', 'bogus', 'bogus', 'asset', 'bogus'),
        'displayname', 'hello'
    )
    assert_raises(
        NotFoundError, self.content_store.get_attrs,
        Location('bogus', 'bogus', 'bogus', 'asset', 'bogus')
    )
    assert_raises(
        NotFoundError, self.content_store.set_attrs,
        Location('bogus', 'bogus', 'bogus', 'asset', 'bogus'),
        {'displayname': 'hello'}
    )
    assert_raises(
        NotFoundError, self.content_store.set_attrs,
        Location('bogus', 'bogus', 'bogus', 'asset', None),
        {'displayname': 'hello'}
    )
def test_no_search_term_api_v2(self):
    app = self._get_test_app()
    response = app.get('/api/2/search/revision', status=400)
    assert_in('Bad request - Missing search term', response.body)
def test_multi_target_init():
    config = Config()
    config.update({
        "multiprocessing": False,
        "blocking": True,
        "device": "cpu",
        "num_epochs": 1,
        "num_inputs": 3,
        "num_outputs": {"t1": 4, "t2": 5},
        "learning_rate": 1.0,
    })
    config.network_topology_json = """
    {
    "fw0": {"class": "hidden", "activation": "identity", "n_out": 3},
    "out1": {"class": "softmax", "loss": "ce", "target": "t1", "from": ["fw0"]},
    "out2": {"class": "softmax", "loss": "ce", "target": "t2", "from": ["fw0"]}
    }
    """

    device = Device("cpu", config=config, blocking=True)
    assert_true(device.trainnet, "train network initialized")
    assert_true(device.testnet, "test network initialized")
    param_vars = device.trainnet.get_all_params_vars()
    print("params:", param_vars)
    assert_equal(len(param_vars), 6, "W, b vars for each out, and fw")
    num_params = get_num_params(param_vars)
    assert_equal(num_params, (3 * 3 + 3) + (3 * 4 + 4) + (3 * 5 + 5), "W, b for each out, and fw")
    assert_in("fw0", device.testnet.hidden)
    assert_in("out1", device.testnet.output)
    assert_in("out2", device.testnet.output)
    assert_is(device.testnet.j["t1"], device.testnet.output["out1"].index)
    assert_true(device.updater)
    update_list = device.updater.getUpdateList()
    print("update list:")
    pprint(update_list)
    update_dict = dict(update_list)
    assert_equal(len(update_dict), len(update_list), "all params in update list only once")
    assert_in("fw0", device.trainnet.hidden)
    assert_equal(len(device.trainnet.hidden), 1)
    assert_in("W_in_data_fw0", device.trainnet.hidden["fw0"].params)
    assert_in("b_fw0", device.trainnet.hidden["fw0"].params)
    assert_equal(len(device.trainnet.hidden["fw0"].params), 2)
    assert_in("out1", device.trainnet.output)
    assert_equal(len(device.trainnet.output), 2)
    assert_in("W_in_fw0_out1", device.trainnet.output["out1"].params)
    assert_in("b_out1", device.trainnet.output["out1"].params)
    assert_equal(len(device.trainnet.output["out1"].params), 2)
    assert_in(device.trainnet.hidden["fw0"].params["W_in_data_fw0"], update_dict)
    assert_in(device.trainnet.hidden["fw0"].params["b_fw0"], update_dict)
    assert_in(device.trainnet.output["out1"].params["W_in_fw0_out1"], update_dict)
    assert_in(device.trainnet.output["out1"].params["b_out1"], update_dict)
    assert_in(device.trainnet.output["out2"].params["W_in_fw0_out2"], update_dict)
    assert_in(device.trainnet.output["out2"].params["b_out2"], update_dict)
    # assert_equal(len(update_dict), 6)  # updater adds other stuff...

    # Set net params.
    net_params = {
        "fw0": {"W_in_data_fw0": numpy.identity(3, dtype="float32"),
                "b_fw0": numpy.zeros((3,), dtype="float32")},
        "out1": {"W_in_fw0_out1": numpy.arange(0.0, 1.2, 0.1, dtype="float32").reshape((3, 4)),
                 "b_out1": numpy.arange(0.0, 4, dtype="float32")},
        "out2": {"W_in_fw0_out2": numpy.arange(0.0, 1.5, 0.1, dtype="float32").reshape((3, 5)),
                 "b_out2": numpy.arange(0.0, 5, dtype="float32")}
    }
    device.trainnet.set_params_by_dict(net_params)
    device.testnet.set_params_by_dict(net_params)

    # Show params.
    for p in param_vars:
        print("init %s:" % p)
        pprint(p.get_value())

    # Init dataset.
    dataset = StaticDataset(data=[{
        "data": numpy.array([[0.1, 0.2, -0.3]], dtype="float32"),
        "t1": numpy.array([2]),
        "t2": numpy.array([4])
    }], output_dim=config.typed_value("num_outputs"))
    dataset.init_seq_order()
    assert_equal(dataset.is_data_sparse("data"), False)
    assert_equal(dataset.is_data_sparse("t1"), True)
    assert_equal(dataset.is_data_sparse("t2"), True)

    # Copy to device allocation.
    success = assign_dev_data_single_seq(device, dataset, 0)
    assert_true(success, "failed to allocate & assign data")

    # Check allocated data.
    assert_equal(device.targets["data"].shape, (1, 1, 3))  # input shape. (time,batch,dim)
    assert_in("t1", device.targets)
    assert_in("t2", device.targets)
    assert_equal(device.targets["t1"].shape, (1, 1))
    assert_equal(device.targets["t2"].shape, (1, 1))
    assert_equal(device.output_index["data"].shape, (1, 1))
    numpy.testing.assert_equal(device.output_index["data"], numpy.array([[1]]))
    assert_equal(device.output_index["t1"].shape, (1, 1))
    numpy.testing.assert_equal(device.output_index["t1"], numpy.array([[1]]))

    # Forward test.
    device.update_data()
    device.testnet.costs["out1"].name = "out1_cost"  # nice in the func graph
    out_i1 = device.testnet.output["out1"].index
    out_i1_nonzero = device.testnet.output["out1"].i
    nll1, pcx1 = T.nnet.crossentropy_softmax_1hot(
        x=device.testnet.output["out1"].y_m[out_i1_nonzero],
        y_idx=device.testnet.output["out1"].y_data_flat[out_i1_nonzero])
    forward_func = theano.function(
        inputs=[device.block_start, device.block_end],
        outputs=[
            device.testnet.j["t1"], out_i1, out_i1_nonzero[0], nll1, pcx1,
            device.testnet.costs["out1"],
            device.testnet.output["out1"].p_y_given_x,
            device.testnet.costs["out2"],
            device.testnet.output["out2"].p_y_given_x],
        givens=device.make_givens(device.testnet),
        no_default_updates=True,
        on_unused_input='warn',
        name="forward")
    #print "forward func:"
    #theano.printing.debugprint(forward_func)
    net_j1, out_i1_val, out_i1_nz_val, nll1_val, pcx1_val, t1_cost, t1_y, t2_cost, t2_y = forward_func(0, 1)
    print("forward results:")
    pprint(net_j1)
    pprint(out_i1_val)
    pprint(out_i1_nz_val)
    pprint(nll1_val)
    pprint(pcx1_val)
    pprint(t1_cost)
    pprint(t1_y)
    pprint(t2_cost)
    pprint(t2_y)
    assert_equal(net_j1, numpy.array([[1]]))
    assert_equal(out_i1_val, numpy.array([[1]]))
    assert_equal(out_i1_nz_val, numpy.array([0]))
    assert_almost_equal(nll1_val, numpy.array([t1_cost]))
    numpy.testing.assert_almost_equal(t1_y, pcx1_val[None, ...])
    assert_almost_equal(t1_cost, 1.440189698561195, places=6)
    assert_almost_equal(t2_cost, 0.45191439593759336, places=6)
    numpy.testing.assert_almost_equal(
        t1_y, numpy.array([[[0.0320586, 0.08714432, 0.23688282, 0.64391426]]]), decimal=6)
    numpy.testing.assert_almost_equal(
        t2_y, numpy.array([[[0.01165623, 0.03168492, 0.08612854, 0.23412166, 0.63640865]]]), decimal=6)

    # One train step.
    device.set_learning_rate(config.typed_value("learning_rate"))
    device.run("train")
    output_list, outputs_format = device.result()
    assert_is_instance(output_list, list)
    assert_true(outputs_format, "for train, we should always get the format")
    outputs = Device.make_result_dict(output_list, outputs_format)
    pprint(outputs)
    assert_in("cost:out1", outputs)
    assert_greater(outputs["cost:out1"], 0)
    assert_almost_equal(outputs["cost:out1"], t1_cost)

    # Get net params.
    params = device.get_net_train_params(device.trainnet)
    references_params = {
        "W_in_data_fw0": numpy.array(
            [[1.00055406e+00, 5.54056978e-04, 5.54056978e-04],
             [1.10811396e-03, 1.00110811e+00, 1.10811396e-03],
             [-1.66217093e-03, -1.66217093e-03, 9.98337829e-01]]),
        "b_fw0": numpy.array([0.00554057, 0.00554057, 0.00554057]),
        "W_in_fw0_out1": numpy.array(
            [[-0.00320586, 0.09128557, 0.27631172, 0.23560857],
             [0.39358828, 0.48257114, 0.75262344, 0.57121715],
             [0.80961758, 0.9261433, 0.77106485, 1.29317428]]),
        "b_out1": numpy.array([-0.0320586, 0.91285568, 2.76311718, 2.35608574]),
        "W_in_fw0_out2": numpy.array(
            [[-1.16562310e-03, 9.68315079e-02, 1.91387146e-01, 2.76587834e-01, 4.36359135e-01],
             [4.97668754e-01, 5.93663016e-01, 6.82774291e-01, 7.53175669e-01, 9.72718271e-01],
             [1.00349687e+00, 1.10950548e+00, 1.22583856e+00, 1.37023650e+00, 1.29092259e+00]]),
        "b_out2": numpy.array([-0.01165623, 0.96831508, 1.91387146, 2.76587834, 4.36359135])
    }
    assert_equal(len(param_vars), len(params))
    for p, v in zip(param_vars, params):
        print("%s:" % p)
        pprint(v)
        assert_true(p.name)
        numpy.testing.assert_almost_equal(references_params[p.name], v, decimal=6)
def test_getWithEntityBundle(self, download_file_mock, get_file_URL_and_metadata_mock):
    # Note: one thing that remains unexplained is why the previous version of
    # this test worked if you had a .cacheMap file of the form:
    # {"/Users/chris/.synapseCache/663/-1337/anonymous": "2014-09-15T22:54:57.000Z",
    #  "/var/folders/ym/p7cr7rrx4z7fw36sxv04pqh00000gq/T/tmpJ4nz8U": "2014-09-15T23:27:25.000Z"}
    # ...but failed if you didn't.

    bundle = {
        'entity': {
            'id': 'syn10101',
            'name': 'anonymous',
            'dataFileHandleId': '-1337',
            'concreteType': 'org.sagebionetworks.repo.model.FileEntity',
            'parentId': 'syn12345'},
        'fileHandles': [{
            'concreteType': 'org.sagebionetworks.repo.model.file.S3FileHandle',
            'fileName': 'anonymous',
            'contentType': 'application/flapdoodle',
            'contentMd5': '1698d26000d60816caab15169efcd23a',
            'id': '-1337'}],
        'annotations': {}}

    fileHandle = bundle['fileHandles'][0]['id']
    cacheDir = syn.cache.get_cache_dir(fileHandle)
    # Make sure the .cacheMap file does not already exist
    cacheMap = os.path.join(cacheDir, '.cacheMap')
    if os.path.exists(cacheMap):
        os.remove(cacheMap)

    def _downloadFileHandle(fileHandleId, objectId, objectType, path, retries=5):
        # touch file at path
        with open(path, 'a'):
            os.utime(path, None)
        os.path.split(path)
        syn.cache.add(fileHandle, path)
        return path

    def _getFileHandleDownload(fileHandleId, objectId, objectType='FileHandle'):
        return {'fileHandle': bundle['fileHandles'][0],
                'fileHandleId': fileHandleId,
                'preSignedURL': 'http://example.com'}

    download_file_mock.side_effect = _downloadFileHandle
    get_file_URL_and_metadata_mock.side_effect = _getFileHandleDownload

    # 1. ----------------------------------------------------------------------
    # download file to an alternate location
    temp_dir1 = tempfile.mkdtemp()
    e = syn._getWithEntityBundle(entityBundle=bundle,
                                 downloadLocation=temp_dir1,
                                 ifcollision="overwrite.local")
    assert_equal(e.name, bundle["entity"]["name"])
    assert_equal(e.parentId, bundle["entity"]["parentId"])
    assert_equal(utils.normalize_path(os.path.abspath(os.path.dirname(e.path))),
                 utils.normalize_path(temp_dir1))
    assert_equal(bundle["fileHandles"][0]["fileName"], os.path.basename(e.path))
    assert_equal(utils.normalize_path(os.path.abspath(e.path)),
                 utils.normalize_path(os.path.join(temp_dir1, bundle["fileHandles"][0]["fileName"])))

    # 2. ----------------------------------------------------------------------
    # get without specifying downloadLocation
    e = syn._getWithEntityBundle(entityBundle=bundle, ifcollision="overwrite.local")
    assert_equal(e.name, bundle["entity"]["name"])
    assert_equal(e.parentId, bundle["entity"]["parentId"])
    assert_in(bundle["fileHandles"][0]["fileName"], e.files)

    # 3. ----------------------------------------------------------------------
    # download to another location
    temp_dir2 = tempfile.mkdtemp()
    assert_not_equals(temp_dir2, temp_dir1)
    e = syn._getWithEntityBundle(entityBundle=bundle,
                                 downloadLocation=temp_dir2,
                                 ifcollision="overwrite.local")
    assert_in(bundle["fileHandles"][0]["fileName"], e.files)
    assert_is_not_none(e.path)
    assert_true(utils.equal_paths(os.path.dirname(e.path), temp_dir2))

    # 4. ----------------------------------------------------------------------
    # test preservation of local state
    url = 'http://foo.com/secretstuff.txt'
    # need to create a bundle with externalURL
    externalURLBundle = dict(bundle)
    externalURLBundle['fileHandles'][0]['externalURL'] = url
    e = File(name='anonymous', parentId="syn12345", synapseStore=False, externalURL=url)
    e.local_state({'zap': 'pow'})
    e = syn._getWithEntityBundle(entityBundle=externalURLBundle, entity=e)
    assert_equal(e.local_state()['zap'], 'pow')
    assert_equal(e.synapseStore, False)
    assert_equal(e.externalURL, url)
def test_combi_auto_enc():
    config = Config()
    config.update({
        "multiprocessing": False,
        "blocking": True,
        "device": "cpu",
        "num_epochs": 1,
        "num_inputs": 3,
        "num_outputs": {"classes": 2},
        "learning_rate": 1.0,
        "network": {
            "output": {"class": "softmax", "loss": "ce", "target": "classes"},
            "auto-enc": {"class": "softmax", "loss": "sse", "dtype": "float32", "target": "data"}
        }
    })

    device = Device("cpu", config=config, blocking=True)

    # Set net params.
    def get_net_params(with_auto_enc=True):
        d = {
            "output": {"W_in_data_output": numpy.arange(0.1, 0.7, 0.1, dtype="float32").reshape((3, 2)),
                       "b_output": numpy.arange(0.0, 2, dtype="float32")}
        }
        if with_auto_enc:
            d["auto-enc"] = {"W_in_data_auto-enc": numpy.arange(0.1, 1.0, 0.1, dtype="float32").reshape((3, 3)),
                             "b_auto-enc": numpy.arange(0.0, 3, dtype="float32")}
        return d

    device.trainnet.set_params_by_dict(get_net_params())
    device.testnet.set_params_by_dict(get_net_params())

    # Show params.
    for p in device.trainnet.get_all_params_vars():
        print("init %s:" % p)
        pprint(p.get_value())

    # Init dataset.
    dataset = StaticDataset(data=[{
        "data": numpy.array([[0.1, 0.2, -0.3]], dtype="float32"),
        "classes": numpy.array([1]),
    }], output_dim=config.typed_value("num_outputs"))
    dataset.init_seq_order()

    # Copy to device allocation.
    success = assign_dev_data_single_seq(device, dataset, 0)
    assert_true(success, "failed to allocate & assign data")

    # One train step.
    device.set_learning_rate(config.typed_value("learning_rate"))
    device.run("train")
    output_list, outputs_format = device.result()
    assert_is_instance(output_list, list)
    assert_true(outputs_format, "for train, we should always get the format")
    outputs = Device.make_result_dict(output_list, outputs_format)
    pprint(outputs)
    assert_in("cost:output", outputs)
    assert_in("cost:auto-enc", outputs)
    expected_cost_output = 0.3132616877555847
    assert_almost_equal(outputs["cost:output"], expected_cost_output, places=6)
    exact_cost_output = outputs["cost:output"]
    assert_almost_equal(outputs["cost:auto-enc"], 1.7544001340866089, places=6)

    # Now, drop the auto-enc from the network, and redo the same thing.
    del config.typed_value("network")["auto-enc"]
    device = Device("cpu", config=config, blocking=True)
    device.trainnet.set_params_by_dict(get_net_params(with_auto_enc=False))
    device.testnet.set_params_by_dict(get_net_params(with_auto_enc=False))
    for p in device.trainnet.get_all_params_vars():
        print("second run, init %s:" % p)
        pprint(p.get_value())
    dataset.init_seq_order()  # reset. probably not needed
    success = assign_dev_data_single_seq(device, dataset, 0)
    assert_true(success, "failed to allocate & assign data")
    device.set_learning_rate(config.typed_value("learning_rate"))
    device.run("train")
    output_list, outputs_format = device.result()
    assert_is_instance(output_list, list)
    assert_true(outputs_format, "for train, we should always get the format")
    outputs = Device.make_result_dict(output_list, outputs_format)
    pprint(outputs)
    assert_in("cost:output", outputs)
    assert_not_in("cost:auto-enc", outputs)
    assert_almost_equal(outputs["cost:output"], expected_cost_output, places=6)
    assert_equal(outputs["cost:output"], exact_cost_output)
def test_templates_contents():
    block = make_block()
    block.display_name = "Test Drag & Drop"
    block.question_text = "Question Drag & Drop"
    block.weight = 5

    student_fragment = block.render('student_view', Mock())
    assert_in('<section class="xblock--drag-and-drop">', student_fragment.content)
    assert_in('{{ value }}', student_fragment.content)
    assert_in("Test Drag & Drop", student_fragment.content)
    assert_in("Question Drag & Drop", student_fragment.content)
    assert_in("(5 Points Possible)", student_fragment.content)

    studio_fragment = block.render('studio_view', Mock())
    assert_in('<div class="xblock--drag-and-drop editor-with-buttons">', studio_fragment.content)
    assert_in('{{ value }}', studio_fragment.content)
def test_contains_single_port(self):
    group = TransportGroup()
    group.add('1-2')
    assert_in(1, group)
    assert_in(2, group)
    assert_not_in(3, group)
def test_combi_auto_enc_longer():
    config = Config()
    config.update({
        "multiprocessing": False,
        "blocking": True,
        "device": "cpu",
        "num_epochs": 1,
        "num_inputs": 3,
        "num_outputs": {"classes": 2},
        "learning_rate": 1.0,
        "adadelta": True,
        "network": {
            "output": {"class": "softmax", "loss": "ce", "target": "classes"},
            "auto-enc": {"class": "softmax", "loss": "sse", "dtype": "float32", "target": "data"}
        }
    })

    device = Device("cpu", config=config, blocking=True)

    # Set net params.
    def get_net_params(with_auto_enc=True):
        d = {
            "output": {"W_in_data_output": numpy.arange(0.1, 0.7, 0.1, dtype="float32").reshape((3, 2)),
                       "b_output": numpy.arange(0.0, 2, dtype="float32")}
        }
        if with_auto_enc:
            d["auto-enc"] = {"W_in_data_auto-enc": numpy.arange(0.1, 1.0, 0.1, dtype="float32").reshape((3, 3)),
                             "b_auto-enc": numpy.arange(0.0, 3, dtype="float32")}
        return d

    device.trainnet.set_params_by_dict(get_net_params())
    device.testnet.set_params_by_dict(get_net_params())

    # Show params.
    for p in device.trainnet.get_all_params_vars():
        print("init %s:" % p)
        pprint(p.get_value())

    # Init dataset.
    dataset = DummyDataset(input_dim=config.typed_value("num_inputs"),
                           output_dim=config.typed_value("num_outputs"),
                           num_seqs=10)
    dataset.init_seq_order()

    cost_output_sum = 0.0
    for seq_idx in range(dataset.num_seqs):
        # Copy to device allocation.
        success = assign_dev_data_single_seq(device, dataset, seq_idx)
        assert_true(success, "failed to allocate & assign data")

        # One train step.
        device.set_learning_rate(config.typed_value("learning_rate"))
        device.run("train")
        output_list, outputs_format = device.result()
        assert_is_instance(output_list, list)
        assert_true(outputs_format, "for train, we should always get the format")
        outputs = Device.make_result_dict(output_list, outputs_format)
        print("seq %i" % seq_idx)
        pprint(outputs)
        assert_in("cost:output", outputs)
        assert_in("cost:auto-enc", outputs)
        cost_output_sum += outputs["cost:output"]

    # Now, drop the auto-enc from the network, and redo the same thing.
    del config.typed_value("network")["auto-enc"]
    device = Device("cpu", config=config, blocking=True)
    device.trainnet.set_params_by_dict(get_net_params(with_auto_enc=False))
    device.testnet.set_params_by_dict(get_net_params(with_auto_enc=False))
    for p in device.trainnet.get_all_params_vars():
        print("second run, init %s:" % p)
        pprint(p.get_value())
    dataset.init_seq_order()  # reset

    cost2_output_sum = 0.0
    for seq_idx in range(dataset.num_seqs):
        # Copy to device allocation.
        success = assign_dev_data_single_seq(device, dataset, seq_idx)
        assert_true(success, "failed to allocate & assign data")

        # One train step.
        device.set_learning_rate(config.typed_value("learning_rate"))
        device.run("train")
        output_list, outputs_format = device.result()
        assert_is_instance(output_list, list)
        assert_true(outputs_format, "for train, we should always get the format")
        outputs = Device.make_result_dict(output_list, outputs_format)
        print("seq %i" % seq_idx)
        pprint(outputs)
        assert_in("cost:output", outputs)
        assert_not_in("cost:auto-enc", outputs)
        cost2_output_sum += outputs["cost:output"]

    assert_equal(cost_output_sum, cost2_output_sum)
    assert_almost_equal(cost_output_sum, 16.028842568397522, places=6)
def test_form_renders(self):
    app = self._get_test_app()
    env, response = _get_group_new_page(app)
    assert_in('group-edit', response.forms)
def test_from_string(self):
    from_string = TransportGroup.from_string
    group = from_string(1)
    assert_is_instance(group, TransportGroup)
    address = TransportAddress.from_string(1)
    assert_in(address, group)
def test_activity_extras(self):
    t = Ticket(summary='my ticket', ticket_num=12)
    assert_in('allura_id', t.activity_extras)
    assert_equal(t.activity_extras['summary'], t.summary)
def test_repr_doesnt_crash(sub):
    # Not much we can do here, but we should make sure __repr__ doesn't crash
    # or anything and it does at least vaguely look like what we want
    assert_in('Subtitle', repr(sub))
    assert_in(str(sub.index), repr(sub))
def test_NetworkDescription_to_json_config1():
    config = Config()
    config.update(config1_dict)
    desc = LayerNetworkDescription.from_config(config)
    desc_json_content = desc.to_json_content()
    pprint(desc_json_content)
    assert_in("hidden_0", desc_json_content)
    assert_equal(desc_json_content["hidden_0"]["class"], "forward")
    assert_in("hidden_1", desc_json_content)
    assert_in("output", desc_json_content)

    orig_network = LayerNetwork.from_description(desc)
    assert_in("hidden_0", orig_network.hidden)
    assert_in("hidden_1", orig_network.hidden)
    assert_equal(len(orig_network.hidden), 2)
    assert_is_instance(orig_network.hidden["hidden_0"], ForwardLayer)
    assert_equal(orig_network.hidden["hidden_0"].layer_class, "hidden")
    orig_json_content = orig_network.to_json_content()
    pprint(orig_json_content)
    assert_in("hidden_0", orig_json_content)
    assert_equal(orig_json_content["hidden_0"]["class"], "hidden")
    assert_in("hidden_1", orig_json_content)
    assert_in("output", orig_json_content)

    new_network = LayerNetwork.from_json(
        desc_json_content,
        config1_dict["num_inputs"],
        {"classes": (config1_dict["num_outputs"], 1)})
    new_json_content = new_network.to_json_content()
    if orig_json_content != new_json_content:
        print(dict_diff_str(orig_json_content, new_json_content))
    assert_equal(orig_json_content, new_network.to_json_content())
def test_calendar_template_content(self):  # pylint: disable=no-self-use
    """ Test content of GoogleCalendarBlock's rendered views """
    block = TestGoogleCalendarBlock.make_calendar_block()
    block.usage_id = Mock()

    student_fragment = block.render('student_view', Mock())  # pylint: disable=no-value-for-parameter
    assert_in('<div class="google-calendar-xblock-wrapper">', student_fragment.content)
    assert_in(cgi.escape(DEFAULT_CALENDAR_URL), student_fragment.content)
    assert_in('Google Calendar', student_fragment.content)

    studio_fragment = block.render('studio_view', Mock())
    assert_in(STUDIO_EDIT_WRAPPER, studio_fragment.content)
    assert_in(VALIDATION_WRAPPER, studio_fragment.content)
    assert_in(USER_INPUTS_WRAPPER, studio_fragment.content)
    assert_in(BUTTONS_WRAPPER, studio_fragment.content)
def then_i_see_that_i_was_most_recently_in_the_subsection(step):
    message = world.css_text('section.course-content > p')
    assert_in("You were most recently in Test Subsection 2", message)
def test_config2_bidirect_lstm():
    config = Config()
    config.update(config2_dict)
    desc = LayerNetworkDescription.from_config(config)
    assert_true(desc.bidirectional)
    network = LayerNetwork.from_config_topology(config)
    net_json = network.to_json_content()
    pprint(net_json)
    assert_in("output", net_json)
    assert_in("hidden_0_fw", net_json)
    assert_in("hidden_0_bw", net_json)
    assert_in("hidden_1_fw", net_json)
    assert_in("hidden_1_bw", net_json)
    assert_in("hidden_2_fw", net_json)
    assert_in("hidden_2_bw", net_json)
    assert_equal(net_json["output"]["from"], ["hidden_2_fw", "hidden_2_bw"])
    assert_equal(len(net_json), 7)
def test_list_dids_extended(self):
    """ META (CLIENTS) : Get all dids matching the values of the provided metadata keys """

    # Test did Columns use case
    dsns = []
    tmp_scope = 'mock'
    tmp_dsn1 = 'dsn_%s' % generate_uuid()
    dsns.append(tmp_dsn1)

    dataset_meta = {
        'project': 'data12_8TeV',
        'run_number': 400000,
        'stream_name': 'physics_CosmicCalo',
        'prod_step': 'merge',
        'datatype': 'NTUP_TRIG',
        'version': 'f392_m920',
    }
    self.did_client.add_dataset(scope=tmp_scope, name=tmp_dsn1, meta=dataset_meta)

    tmp_dsn2 = 'dsn_%s' % generate_uuid()
    dsns.append(tmp_dsn2)
    dataset_meta['run_number'] = 400001
    self.did_client.add_dataset(scope=tmp_scope, name=tmp_dsn2, meta=dataset_meta)

    tmp_dsn3 = 'dsn_%s' % generate_uuid()
    dsns.append(tmp_dsn3)
    dataset_meta['stream_name'] = 'physics_Egamma'
    dataset_meta['datatype'] = 'NTUP_SMWZ'
    self.did_client.add_dataset(scope=tmp_scope, name=tmp_dsn3, meta=dataset_meta)

    dids = self.did_client.list_dids_extended(tmp_scope, {'project': 'data12_8TeV', 'version': 'f392_m920'})
    results = []
    for d in dids:
        results.append(d)
    for dsn in dsns:
        assert_in(dsn, results)
    dsns.remove(tmp_dsn1)

    dids = self.did_client.list_dids_extended(tmp_scope, {'project': 'data12_8TeV', 'run_number': 400001})
    results = []
    for d in dids:
        results.append(d)
    for dsn in dsns:
        assert_in(dsn, results)
    dsns.remove(tmp_dsn2)

    dids = self.did_client.list_dids_extended(
        tmp_scope,
        {'project': 'data12_8TeV', 'stream_name': 'physics_Egamma', 'datatype': 'NTUP_SMWZ'})
    results = []
    for d in dids:
        results.append(d)
    for dsn in dsns:
        assert_in(dsn, results)

    # Test JSON use case
    if self.json_implemented:
        did1 = 'name_%s' % generate_uuid()
        did2 = 'name_%s' % generate_uuid()
        did3 = 'name_%s' % generate_uuid()
        did4 = 'name_%s' % generate_uuid()

        key1 = 'key_1_%s' % generate_uuid()
        key2 = 'key_2_%s' % generate_uuid()
        key3 = 'key_3_%s' % generate_uuid()

        value1 = 'value_1_%s' % generate_uuid()
        value2 = 'value_2_%s' % generate_uuid()
        value3 = 'value_3_%s' % generate_uuid()
        value_not_1 = 'value_not_1_%s' % generate_uuid()
        value_not_2 = 'value_not_1_%s' % generate_uuid()
        value_unique = 'value_unique_%s' % generate_uuid()

        self.did_client.add_did(scope=tmp_scope, name=did1, type="DATASET")
        self.did_client.add_did(scope=tmp_scope, name=did2, type="DATASET")
        self.did_client.add_did(scope=tmp_scope, name=did3, type="DATASET")
        self.did_client.add_did(scope=tmp_scope, name=did4, type="DATASET")

        self.did_client.set_metadata(scope=tmp_scope, name=did1, key=key1, value=value1)
        self.did_client.set_metadata(scope=tmp_scope, name=did1, key=key2, value=value2)

        self.did_client.set_metadata(scope=tmp_scope, name=did2, key=key1, value=value1)
        self.did_client.set_metadata(scope=tmp_scope, name=did2, key=key2, value=value_not_2)
        self.did_client.set_metadata(scope=tmp_scope, name=did2, key=key3, value=value3)

        self.did_client.set_metadata(scope=tmp_scope, name=did3, key=key1, value=value_not_1)
        self.did_client.set_metadata(scope=tmp_scope, name=did3, key=key2, value=value2)
        self.did_client.set_metadata(scope=tmp_scope, name=did3, key=key3, value=value3)

        self.did_client.set_metadata(scope=tmp_scope, name=did4, key=key1, value=value1)
        self.did_client.set_metadata(scope=tmp_scope, name=did4, key=key2, value=value2)
        self.did_client.set_metadata(scope=tmp_scope, name=did4, key=key3, value=value_unique)

        # Key not there
        dids = self.did_client.list_dids_extended(tmp_scope, {'key45': 'value'})
        results = []
        for d in dids:
            results.append(d)
        assert_equal(len(results), 0)

        # Value not there
        dids = self.did_client.list_dids_extended(tmp_scope, {key1: 'value_not_there'})
        results = []
        for d in dids:
            results.append(d)
        assert_equal(len(results), 0)

        # key1 = value1
        dids = self.did_client.list_dids_extended(tmp_scope, {key1: value1})
        results = []
        for d in dids:
            results.append(d)
        assert_equal(len(results), 3)
        assert_in(did1, results)
        assert_in(did2, results)
        assert_in(did4, results)

        # key1, key2
        dids = self.did_client.list_dids_extended(tmp_scope, {key1: value1, key2: value2})
        results = []
        for d in dids:
            results.append(d)
        assert_equal(len(results), 2)
        assert_in(did1, results)
        assert_in(did4, results)

        # key1, key2, key3
        dids = self.did_client.list_dids_extended(tmp_scope, {key1: value1, key2: value2, key3: value3})
        results = []
        for d in dids:
            results.append(d)
        assert_equal(len(results), 0)

        # key3 = unique value
        dids = self.did_client.list_dids_extended(tmp_scope, {key3: value_unique})
        results = []
        for d in dids:
            results.append(d)
        assert_equal(len(results), 1)
        assert_in(did4, results)
def test_get_context(self):
    res = self.view.get_context_data(object=self.user)
    nt.assert_in('guid', res)
    nt.assert_equal(res.get('guid'), self.user._id)
def test_encode_headers(self):
    test_string_headers = [u'id', u'namé']
    test_float_headers = [u'id', u'näme', 2.0]
    test_int_headers = [u'id', u'nóm', 3]

    test_result_string_headers = loader.encode_headers(test_string_headers)
    test_result_float_headers = loader.encode_headers(test_float_headers)
    test_result_int_headers = loader.encode_headers(test_int_headers)

    assert_in('id', test_result_string_headers)
    assert_in('name', test_result_string_headers)
    assert_in('id', test_result_float_headers)
    assert_in('name', test_result_float_headers)
    assert_in('2.0', test_result_float_headers)
    assert_in('id', test_result_int_headers)
    assert_in('nom', test_result_int_headers)
    assert_in('3', test_result_int_headers)
def test_list_did_meta(self):
    """ DID Meta (Hardcoded): List did meta """
    dsns = []
    tmp_dsn1 = 'dsn_%s' % generate_uuid()
    dsns.append(tmp_dsn1)

    dataset_meta = {
        'project': 'data12_8TeV',
        'run_number': 400000,
        'stream_name': 'physics_CosmicCalo',
        'prod_step': 'merge',
        'datatype': 'NTUP_TRIG',
        'version': 'f392_m920',
    }
    add_did(scope=self.tmp_scope, name=tmp_dsn1, type="DATASET", account=self.root, meta=dataset_meta)

    tmp_dsn2 = 'dsn_%s' % generate_uuid()
    dsns.append(tmp_dsn2)
    dataset_meta['run_number'] = 400001
    add_did(scope=self.tmp_scope, name=tmp_dsn2, type="DATASET", account=self.root, meta=dataset_meta)

    tmp_dsn3 = 'dsn_%s' % generate_uuid()
    dsns.append(tmp_dsn3)
    dataset_meta['stream_name'] = 'physics_Egamma'
    dataset_meta['datatype'] = 'NTUP_SMWZ'
    add_did(scope=self.tmp_scope, name=tmp_dsn3, type="DATASET", account=self.root, meta=dataset_meta)

    dids = list_dids(self.tmp_scope, {'project': 'data12_8TeV', 'version': 'f392_m920'})
    results = []
    for d in dids:
        results.append(d)
    for dsn in dsns:
        assert_in(dsn, results)
    dsns.remove(tmp_dsn1)

    dids = list_dids(self.tmp_scope, {'project': 'data12_8TeV', 'run_number': 400001})
    results = []
    for d in dids:
        results.append(d)
    for dsn in dsns:
        assert_in(dsn, results)
    dsns.remove(tmp_dsn2)

    dids = list_dids(
        self.tmp_scope,
        {'project': 'data12_8TeV', 'stream_name': 'physics_Egamma', 'datatype': 'NTUP_SMWZ'})
    results = []
    for d in dids:
        results.append(d)
    for dsn in dsns:
        assert_in(dsn, results)
def i_should_see_the_imitated_pubs_content_on_the_page(step):
    httpretty.disable()
    httpretty.enable()
    assert_in('Das Boot', world.response_content)
def test_latex_to_html():
    img = latextools.latex_to_html("$x^2$")
    nt.assert_in("data:image/png;base64,iVBOR", img)