def site(site_no): """ A single Site :return: HTTP Response """ # lists the views and formats available for a Site c = conf.URI_SITE_CLASS views_formats = LDAPI.get_classes_views_formats().get(c) try: view, mimetype = LDAPI.get_valid_view_and_format( request.args.get('_view'), request.args.get('_format'), views_formats) # if alternates model, return this info from file if view == 'alternates': instance_uri = 'http://pid.geoscience.gov.au/site/' + site_no del views_formats['renderer'] return render_alternates_view(c, uriparse.quote_plus(c), instance_uri, uriparse.quote_plus(instance_uri), views_formats, request.args.get('_format')) else: from model.site import Site try: s = Site(site_no) return s.render(view, mimetype) except ValueError: return render_template('class_site_no_record.html') except LdapiParameterError as e: return client_error_Response(e)
def _setup_hpos(self, org_dao=None):
    """Insert the standard test fixture HPOs, organizations and sites.

    Populates UNSET/PITT/AZ_TUCSON HPOs, one organization per HPO pairing,
    and three sites, recording ``self.hpo_id``, ``self.organization_id`` and
    ``self.site_id`` for use by the tests.

    :param org_dao: optional OrganizationDao to use; a new one is created
        when omitted.  (Fix: the original accepted this parameter but
        immediately shadowed it with a fresh OrganizationDao, so injected
        DAOs were silently ignored.)
    """
    hpo_dao = HPODao()
    hpo_dao.insert(
        HPO(hpoId=UNSET_HPO_ID, name='UNSET', displayName='Unset',
            organizationType=OrganizationType.UNSET))
    hpo_dao.insert(
        HPO(hpoId=PITT_HPO_ID, name='PITT', displayName='Pittsburgh',
            organizationType=OrganizationType.HPO))
    hpo_dao.insert(
        HPO(hpoId=AZ_HPO_ID, name='AZ_TUCSON', displayName='Arizona',
            organizationType=OrganizationType.HPO))
    self.hpo_id = PITT_HPO_ID

    # honor an injected DAO instead of unconditionally replacing it
    org_dao = org_dao or OrganizationDao()
    org_dao.insert(
        Organization(organizationId=AZ_ORG_ID,
                     externalId='AZ_TUCSON_BANNER_HEALTH',
                     displayName='Banner Health',
                     hpoId=AZ_HPO_ID))
    created_org = org_dao.insert(
        Organization(organizationId=PITT_ORG_ID,
                     externalId='PITT_BANNER_HEALTH',
                     displayName='PITT display Banner Health',
                     hpoId=PITT_HPO_ID))
    self.organization_id = created_org.organizationId

    site_dao = SiteDao()
    created_site = site_dao.insert(
        Site(siteName='Monroeville Urgent Care Center',
             googleGroup='hpo-site-monroeville',
             mayolinkClientNumber=7035769,
             organizationId=PITT_ORG_ID,
             hpoId=PITT_HPO_ID))
    self.site_id = created_site.siteId
    site_dao.insert(
        Site(siteName='Phoenix Urgent Care Center',
             googleGroup='hpo-site-bannerphoenix',
             mayolinkClientNumber=7035770,
             organizationId=PITT_ORG_ID,
             hpoId=PITT_HPO_ID))
    # NOTE(review): this site reuses mayolinkClientNumber 7035770 and pairs
    # an AZ organization with hpoId=AZ_HPO_ID — looks intentional for the
    # fixtures, but confirm the duplicate client number.
    site_dao.insert(
        Site(siteName='Phoenix clinic',
             googleGroup='hpo-site-clinic-phoenix',
             mayolinkClientNumber=7035770,
             organizationId=AZ_ORG_ID,
             hpoId=AZ_HPO_ID))
def generate_site(self):
    """Populate ``self.site`` with the static descriptor for the TTG RSS feed.

    The feed URL already carries the auth payload, so no login is required;
    only browser-like headers are attached.
    """
    ttg = Site()
    ttg.login_needed = False
    ttg.home_page = "https://totheglory.im/putrss.php?par=dnZ2MTA1LDEwNywxMDQsMTA2LDUxLDUyLDUzLDU0LDEwOCwxMDksNjIsNjMsNjcsNjksNzAsNzMsNzYsNzUsNzQsODcsODgsOTksOTAsODIsODMsNTksNTcsNTgsMTAzLDEwMSw2MCw5MSw4NCw5Mnx8fGI3ODRlNmI0ZjkzMGY3ODJjNjFmMGNhNjZjMGY1NzY0eno=&ssl=yes"
    browser_headers = {
        "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/62.0.3202.94 Safari/537.36",
        "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8",
        "accept-language": "en-US,en;q=0.9,zh-CN;q=0.8,zh;q=0.7,zh-TW;q=0.6,ja;q=0.5",
        "upgrade-insecure-requests": "1",
    }
    ttg.login_headers = browser_headers
    self.site = ttg
def test_insert(self):
    """An inserted site round-trips via get() and get_by_google_group()."""
    site = Site(siteName='site',
                googleGroup='*****@*****.**',
                mayolinkClientNumber=12345,
                hpoId=PITT_HPO_ID)
    created_site = self.site_dao.insert(site)
    new_site = self.site_dao.get(created_site.siteId)
    # copy the generated id onto the local object so the dict snapshots match
    site.siteId = created_site.siteId
    self.assertEquals(site.asdict(), new_site.asdict())
    self.assertEquals(
        site.asdict(),
        self.site_dao.get_by_google_group('*****@*****.**').asdict())
def generate_site(self):
    """Populate ``self.site`` with the M-Team torrent RSS descriptor.

    The passkey read from config is appended to the feed URL, so no
    interactive login is needed; browser-like headers are attached.
    """
    self.passKey = Config.get("mteam_passkey")
    mteam = Site()
    mteam.login_needed = False
    feed_base = "https://kp.m-team.cc/torrentrss.php?https=1&rows=50&cat410=1&cat429=1&cat424=1&cat430=1&cat426=1&cat437=1&cat431=1&cat432=1&cat436=1&cat425=1&cat433=1&cat411=1&cat412=1&cat413=1&cat440=1&isize=1&passkey="
    mteam.home_page = feed_base + self.passKey
    mteam.login_headers = {
        "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/62.0.3202.94 Safari/537.36",
        "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8",
        "accept-language": "en-US,en;q=0.9,zh-CN;q=0.8,zh;q=0.7,zh-TW;q=0.6,ja;q=0.5",
        "upgrade-insecure-requests": "1",
    }
    self.site = mteam
def generate_site(self):
    """Build (and store on ``self``) the Site descriptor for hdhome.org.

    Login is cookie-based: the request carries a pre-baked session cookie and
    success is verified by finding the configured username in the page.
    """
    site = Site()
    site.home_page = "http://hdhome.org/torrents.php"
    site.login_page = "http://hdhome.org/takelogin.php"
    # NOTE(review): the Cookie header below embeds live session/auth tokens;
    # they expire and should not live in source control — consider moving to
    # config alongside hdh_username.
    site.login_headers = {
        "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.113 Safari/537.36",
        "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8",
        "Accept-Language": "en-US,en;q=0.8,zh-CN;q=0.6,zh;q=0.4,zh-TW;q=0.2,ja;q=0.2",
        "Accept-Encoding": "gzip, deflate, br",
        "Connection": "keep-alive",
        "DNT": "1",
        "Host": "hdhome.org",
        "Referer": "http://hdhome.org/index.php",
        "Upgrade-Insecure-Requests": "1",
        "Cookie": "__cfduid=d485315600280be35365e84ee5ec16d651512109005; c_secure_uid=ODU1MzM%3D; c_secure_pass=847ef9da672b8f7ef43c2952b06473b7; c_secure_ssl=bm9wZQ%3D%3D; c_secure_tracker_ssl=bm9wZQ%3D%3D; c_secure_login=bm9wZQ%3D%3D"
    }
    site.login_needed = True
    # login considered successful when this selector yields the username
    site.login_verify_css_selector = "#nav_block a.User_Name b"
    site.login_verify_str = Config.get("hdh_username")
    self.site = site
    return site
def new_comment():
    """Flask endpoint: accept a new comment as JSON and enqueue it.

    Validates the site token, rejects honeypot-triggered spam, then hands the
    payload to the async processor.  Any failure results in HTTP 400.
    """
    try:
        data = request.get_json()
        logger.info(data)

        # validate token: retrieve site entity
        token = data.get('token', '')
        site = Site.select().where(Site.token == token).get()
        if site is None:
            # NOTE(review): peewee's .get() raises DoesNotExist rather than
            # returning None, so this branch is likely dead — the outer
            # except handles the unknown-token case in practice; confirm.
            logger.warn('Unknown site %s' % token)
            abort(400)

        # honeypot for spammers: hidden field must stay empty
        captcha = data.get('captcha', '')
        if captcha:
            logger.warn('discard spam: data %s' % data)
            abort(400)

        processor.enqueue({'request': 'new_comment', 'data': data})
    except:
        # NOTE(review): bare except also catches the abort(400) HTTPExceptions
        # raised above, logging them as "failure" before re-aborting; a
        # narrower `except Exception` would behave the same here but clearer.
        logger.exception("new comment failure")
        abort(400)

    return "OK"
def test_overwrite_existing_pairing(self):
    """add_missing_hpo_from_site re-pairs an already-paired participant.

    A participant paired with the test HPO is re-paired to a different HPO
    via its site; both the participant row and its summary must reflect the
    new HPO and provider link.
    """
    participant_id = 99
    created = self.dao.insert(
        Participant(participantId=participant_id,
                    biobankId=2,
                    hpoId=self._test_db.hpo_id,
                    providerLink=make_primary_provider_link_for_id(
                        self._test_db.hpo_id)))
    self.participant_summary_dao.insert(self.participant_summary(created))
    self.assertEquals(created.hpoId, self._test_db.hpo_id)  # sanity check

    # a second HPO and a site under it, to re-pair against
    other_hpo = HPODao().insert(
        HPO(hpoId=PITT_HPO_ID + 1, name='DIFFERENT_HPO'))
    other_site = SiteDao().insert(
        Site(hpoId=other_hpo.hpoId,
             siteName='Arbitrary Site',
             googleGroup='*****@*****.**'))

    with self.dao.session() as session:
        self.dao.add_missing_hpo_from_site(session, participant_id,
                                           other_site.siteId)

    # Original Participant + summary is affected.
    refetched = self.dao.get(participant_id)
    self.assertEquals(refetched.hpoId, other_hpo.hpoId)
    self.assertEquals(refetched.providerLink,
                      make_primary_provider_link_for_id(other_hpo.hpoId))
    self.assertEquals(
        self.participant_summary_dao.get(participant_id).hpoId,
        other_hpo.hpoId)
def generate_site(token):
    """Write the RSS feed file for the site identified by *token*.

    Renders the 10 most recent published comments of the site into an RSS 2.0
    document and writes it to the configured RSS_FILE path.

    :param token: site token used to look up the Site row (peewee raises
        DoesNotExist when unknown)
    """
    site = Site.select().where(Site.token == token).get()
    rss_title = get_template("rss_title_message").render(site=site.name)
    md = markdown.Markdown()

    items = []
    for row in (
        Comment.select()
        .join(Site)
        .where(Site.token == token, Comment.published)
        .order_by(-Comment.published)
        .limit(10)
    ):
        item_link = "%s://%s%s" % (config.get(config.RSS_PROTO), site.url, row.url)
        items.append(
            PyRSS2Gen.RSSItem(
                # NOTE(review): this interpolates proto then author into
                # "%s - %s://..." which renders oddly (e.g. "https - Alice://…");
                # possibly the first two args are swapped — confirm intent.
                title="%s - %s://%s%s"
                % (config.get(config.RSS_PROTO), row.author_name, site.url, row.url),
                link=item_link,
                description=md.convert(row.content),
                guid=PyRSS2Gen.Guid("%s/%d" % (item_link, row.id)),
                pubDate=row.published,
            )
        )

    rss = PyRSS2Gen.RSS2(
        title=rss_title,
        link="%s://%s" % (config.get(config.RSS_PROTO), site.url),
        description="Commentaires du site '%s'" % site.name,
        lastBuildDate=datetime.now(),
        items=items,
    )
    # Fix: close the output file deterministically; the original passed an
    # anonymous open() handle to write_xml and relied on GC to close it.
    with open(config.get(config.RSS_FILE), "w") as rss_file:
        rss.write_xml(rss_file, encoding="utf-8")
def generate_site(token):
    """Write the RSS feed file for the site identified by *token*.

    Renders the 10 most recent published comments of the site into an RSS 2.0
    document and writes it to the configured RSS_FILE path.

    :param token: site token used to look up the Site row (peewee raises
        DoesNotExist when unknown)
    """
    site = Site.select().where(Site.token == token).get()
    rss_title = get_template('rss_title_message').render(site=site.name)
    md = markdown.Markdown()

    items = []
    for row in (Comment.select().join(Site).where(
            Site.token == token,
            Comment.published).order_by(-Comment.published).limit(10)):
        item_link = '%s://%s%s' % (config.get(
            config.RSS_PROTO), site.url, row.url)
        items.append(
            PyRSS2Gen.RSSItem(
                # NOTE(review): proto/author interpolation order looks odd
                # ("https - Alice://…") — confirm intent before changing.
                title='%s - %s://%s%s' % (config.get(
                    config.RSS_PROTO), row.author_name, site.url, row.url),
                link=item_link,
                description=md.convert(row.content),
                guid=PyRSS2Gen.Guid('%s/%d' % (item_link, row.id)),
                pubDate=row.published,
            ))

    rss = PyRSS2Gen.RSS2(
        title=rss_title,
        link='%s://%s' % (config.get(config.RSS_PROTO), site.url),
        description='Commentaires du site "%s"' % site.name,
        lastBuildDate=datetime.now(),
        items=items,
    )
    # Fix: close the output file deterministically; the original passed an
    # anonymous open() handle to write_xml and relied on GC to close it.
    with open(config.get(config.RSS_FILE), 'w') as rss_file:
        rss.write_xml(rss_file, encoding='utf-8')
def generate_site(self):
    """Build the Site descriptor for the MIUI forum (www.miui.com).

    Auth is cookie-based via the pre-baked Cookie header; login is verified
    by finding the hard-coded display name in the page.
    """
    site = Site()
    site.home_page = "http://www.miui.com/forum.php?mod=forumdisplay&fid=5&filter=author&orderby=dateline"
    # NOTE(review): the Cookie header embeds live session/auth tokens
    # (PHPSESSID, MIUI_2132_auth, …); they expire and should not be kept in
    # source control — consider moving to config.
    site.login_headers = {
        "User-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/66.0.3359.181 Safari/537.36",
        "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8",
        "Accept-encoding": "gzip, deflate",
        "Accept-language": "en-US,en;q=0.9,zh-CN;q=0.8,zh;q=0.7,zh-TW;q=0.6,ja;q=0.5",
        "Connection": "keep-alive",
        "DNT": "1",
        "Host": "www.miui.com",
        "Upgrade-insecure-requests": "1",
        "Cache-control": "max-age=0",
        "Cookie": "UM_distinctid=163dfb3e2ff73d-0a9bc0e1447ce4-336a7706-13c680-163dfb3e3005eb; __utmc=230417408; __utmz=230417408.1528538555.3.3.utmcsr=baidu|utmccn=(organic)|utmcmd=organic; Hm_lvt_3c5ef0d4b3098aba138e8ff4e86f1329=1528511334,1528511372,1528511379,1528538555; PHPSESSID=vnc60biqa61d31ih5b930abm41; MIUI_2132_widthauto=-1; CNZZDATA5677709=cnzz_eid%3D1297791920-1528461510-http%253A%252F%252Fwww.miui.com%252F%26ntime%3D1528801426; CNZZDATA1270690907=893238151-1528481971-https%253A%252F%252Fwww.baidu.com%252F%7C1528807405; CNZZDATA1270691464=1221855440-1528592121-%7C1528807818; __utma=230417408.2038836981.1528511192.1528948475.1529018798.24; MIUI_2132_saltkey=Dq25112M; MIUI_2132_lastvisit=1529015245; MIUI_2132_ulastactivity=ea4aWC6qjF726cp14stuTP38lm5%2Fz1U8KY4yFHUvbP24ahpFdgOCXE0; MIUI_2132_auth=4b02CoTQT5q2tpsUdjzpS9HF2DtTQnq6AMZYKCILTY8Y%2F%2F83kA39sQ; lastLoginTime=7acaYS1hE%2B2Q74w3%2BvqcMrmn0iQGS%2FnaSlI4t7NM0YtvqlbAQ70Q; MIUI_2132_noticeTitle=1; MIUI_2132_home_diymode=1; CNZZDATA30049650=cnzz_eid%3D1453184979-1528466198-null%26ntime%3D1529015956; MIUI_2132_nofavfid=1; CNZZDATA5557939=cnzz_eid%3D1504230810-1528462019-null%26ntime%3D1529017909; MIUI_2132_smile=3D1; MIUI_2132_viewid=tid_15212628; CNZZDATA2441309=cnzz_eid%3D1987410948-1528462183-null%26ntime%3D1529016417; MIUI_2132_forum_lastvisit=D_40_1529019167D_772_1529019395; MIUI_2132_clearUserdata=forum; MIUI_2132_seccodeS00=783dKdaY3bkLwO0BrfTvBubeflSwHQFV7Do%2F6GbCNTvNarru9KvxpguJH7TSapSAgQJUewp0BQbY; MIUI_2132_checkpm=1; MIUI_2132_sendmail=1; __utmt=1; MIUI_2132_visitedfid=3D37D772D48D773D40D5; MIUI_2132_lastact=1529019546%09forum.php%09; __utmb=230417408.49.10.1529018798; Hm_lpvt_3c5ef0d4b3098aba138e8ff4e86f1329=1529019509"
        # "Cookie": "UM_distinctid=163dfb3e2ff73d-0a9bc0e1447ce4-336a7706-13c680-163dfb3e3005eb; CNZZDATA5677709=cnzz_eid%3D1297791920-1528461510-http%253A%252F%252Fwww.miui.com%252F%26ntime%3D1528477020; CNZZDATA1270690907=893238151-1528481971-https%253A%252F%252Fwww.baidu.com%252F%7C1528481971; __utmc=230417408; __utmz=230417408.1528538555.3.3.utmcsr=baidu|utmccn=(organic)|utmcmd=organic; Hm_lvt_3c5ef0d4b3098aba138e8ff4e86f1329=1528511334,1528511372,1528511379,1528538555; PHPSESSID=vnc60biqa61d31ih5b930abm41; MIUI_2132_widthauto=-1; CNZZDATA2441309=cnzz_eid%3D1987410948-1528462183-null%26ntime%3D1528586803; CNZZDATA30049650=cnzz_eid%3D1453184979-1528466198-null%26ntime%3D1528585841; __utma=230417408.2038836981.1528511192.1528549122.1528589392.5; CNZZDATA5557939=cnzz_eid%3D1504230810-1528462019-null%26ntime%3D1528586646; MIUI_2132_saltkey=sF6wQsSz; MIUI_2132_lastvisit=1528586043; MIUI_2132_visitedfid=773; MIUI_2132_ulastactivity=426f3zvob00mxZWwQ8FWbaETgRqM07T%2FhlJ%2FdhF%2F34sFvhOFvrFk5fg; MIUI_2132_auth=443fj0wdiMkvdCfJKHGlfDsueGlS1sPWf%2BJ%2BQMa323mysEuk6RBvZHg; lastLoginTime=d9e2yZbafd8tt3%2BIQc55QkmXvFWlG588oMrLYGlAZoyMMlgcAOs7; MIUI_2132_forum_lastvisit=D_773_1528589818; MIUI_2132_noticeTitle=1; MIUI_2132_checkpm=1; MIUI_2132_lastact=1528590043%09home.php%09misc; MIUI_2132_sendmail=1; __utmb=230417408.13.10.1528589392; Hm_lpvt_3c5ef0d4b3098aba138e8ff4e86f1329=1528589985"
    }
    site.login_needed = True
    site.login_verify_css_selector = "#hd_u_name"
    # hard-coded forum display name used to confirm the session is valid
    site.login_verify_str = "\n 薛定谔的小仓鼠 "
    return site
def show(self):
    """Render the document identified by the request's ``id`` parameter.

    Python 2 / Google App Engine handler: loads the Site singleton, then the
    Document and, when it has a category, the Category — all via GQL.
    """
    # assumes a single Site entity exists; .get() returns the first match
    self.site = Site.all().get()
    try:
        self.doc = db.GqlQuery(
            "SELECT * FROM Document WHERE document_id = :1",
            self.params.get("id")).get()
        if self.doc.category_id:
            self.category = db.GqlQuery(
                "SELECT * FROM Category where category_id= :1",
                self.doc.category_id).get()
    except Exception, ex:
        # surface the error in the response body rather than a 500 page
        self.render(text="Exception: %s" % ex)
def generate_site(cls):
    """Return the Site descriptor for the ishuhui (u.ishuhui.com) login flow.

    Credentials and the verification nickname all come from config.
    """
    shuhui = Site()
    shuhui.home_page = "http://u.ishuhui.com/"
    shuhui.login_page = "http://u.ishuhui.com/login"
    shuhui.login_needed = True
    shuhui.login_verify_str = Config.get("shuhui_nickname")
    shuhui.login_username = Config.get("shuhui_username")
    shuhui.login_password = Config.get("shuhui_password")
    return shuhui
def new_form_comment():
    """Flask endpoint: accept a comment from an HTML form POST.

    Validates the site token, applies a honeypot spam check, normalizes the
    author's site URL, stores the comment unpublished, then redirects.
    Any failure results in HTTP 400.
    """
    try:
        data = request.form

        # add client IP if provided by HTTP proxy
        ip = ""
        if "X-Forwarded-For" in request.headers:
            ip = request.headers["X-Forwarded-For"]

        # log
        logger.info(data)

        # validate token: retrieve site entity
        token = data.get("token", "")
        site = Site.select().where(Site.token == token).get()
        if site is None:
            # NOTE(review): peewee's .get() raises DoesNotExist instead of
            # returning None; the outer except covers unknown tokens.
            logger.warn("Unknown site %s" % token)
            abort(400)

        # honeypot for spammers: hidden field must stay empty
        captcha = data.get("captcha", "")
        if captcha:
            logger.warn("discard spam: data %s" % data)
            abort(400)

        url = data.get("url", "")
        author_name = data.get("author", "").strip()
        author_gravatar = data.get("email", "").strip()
        author_site = data.get("site", "").lower().strip()
        # prefix scheme-less author sites so they render as working links
        if author_site and author_site[:4] != "http":
            author_site = "http://" + author_site
        message = data.get("message", "")
        created = datetime.now().strftime("%Y-%m-%d %H:%M:%S")

        # add a row to Comment table; notified/published stay NULL until the
        # moderation/notification pipeline processes the comment
        comment = Comment(
            site=site,
            url=url,
            author_name=author_name,
            author_site=author_site,
            author_gravatar=author_gravatar,
            content=message,
            created=created,
            notified=None,
            published=None,
            ip=ip,
        )
        comment.save()
    except:
        # NOTE(review): bare except also swallows the abort(400) calls above
        # before re-aborting with the same status
        logger.exception("new comment failure")
        abort(400)

    return redirect("/redirect/", code=302)
def test_participant_pairing_updates_on_change(self):
    """Updating a site's parent org/HPO re-pairs its participants.

    A participant created against a PITT site must pick up the AZ org/HPO
    (and provider link) after the site is re-parented, in the participant
    row, the summary, and the history record.
    """
    TIME = datetime.datetime(2018, 1, 1)
    TIME2 = datetime.datetime(2018, 1, 2)
    provider_link = '[{"organization": {"reference": "Organization/AZ_TUCSON"}, "primary": true}]'
    site = Site(siteName='site',
                googleGroup='*****@*****.**',
                mayolinkClientNumber=12345,
                hpoId=PITT_HPO_ID,
                organizationId=PITT_ORG_ID)
    created_site = self.site_dao.insert(site)

    with FakeClock(TIME):
        p = Participant(participantId=1, biobankId=2,
                        siteId=created_site.siteId)
        self.participant_dao.insert(p)
        fetch_p = self.participant_dao.get(p.participantId)
        updated_p = self.participant_dao.get(fetch_p.participantId)
        p_summary = self.ps_dao.insert(self.participant_summary(updated_p))

    with FakeClock(TIME2):
        # re-parent the site from PITT to AZ
        update_site_parent = Site(siteId=created_site.siteId,
                                  siteName='site2',
                                  googleGroup='*****@*****.**',
                                  mayolinkClientNumber=123456,
                                  hpoId=AZ_HPO_ID,
                                  organizationId=AZ_ORG_ID)
        self.site_dao.update(update_site_parent)

    updated_p = self.participant_dao.get(fetch_p.participantId)
    ps = self.ps_dao.get(p_summary.participantId)
    ph = self.ps_history.get([updated_p.participantId, 1])

    self.assertEquals(update_site_parent.hpoId, updated_p.hpoId)
    self.assertEquals(update_site_parent.organizationId,
                      updated_p.organizationId)
    self.assertEquals(ps.organizationId, update_site_parent.organizationId)
    self.assertEquals(ps.hpoId, update_site_parent.hpoId)
    # NOTE(review): duplicate of the ps.organizationId assertion above
    self.assertEquals(ps.organizationId, update_site_parent.organizationId)
    self.assertEquals(ph.organizationId, update_site_parent.organizationId)
    self.assertEquals(updated_p.providerLink, provider_link)
    self.assertEquals(ps.lastModified, TIME2)
def test_update(self):
    """Updating a site replaces all fields and re-keys the google group.

    After the update the old google group no longer resolves and the new one
    returns the updated row.
    """
    site = Site(siteName='site',
                googleGroup='*****@*****.**',
                mayolinkClientNumber=12345,
                hpoId=PITT_HPO_ID)
    created_site = self.site_dao.insert(site)
    new_site = Site(siteId=created_site.siteId,
                    siteName='site2',
                    googleGroup='*****@*****.**',
                    mayolinkClientNumber=123456,
                    hpoId=UNSET_HPO_ID)
    self.site_dao.update(new_site)

    fetched_site = self.site_dao.get(created_site.siteId)
    self.assertEquals(new_site.asdict(), fetched_site.asdict())
    self.assertEquals(
        new_site.asdict(),
        self.site_dao.get_by_google_group('*****@*****.**').asdict())
    # the original google group must no longer resolve after the update
    self.assertIsNone(
        self.site_dao.get_by_google_group('*****@*****.**'))
def _setup_data(self):
    """Insert two organizations and two sites as test fixture data.

    'Site 1' is fully populated (address, contact info, geo coordinates);
    'Zebras Rock' is a minimal inactive site under the same organization.
    """
    organization_dao = OrganizationDao()
    site_dao = SiteDao()
    org_1 = organization_dao.insert(
        Organization(externalId='ORG_1',
                     displayName='Organization 1',
                     hpoId=PITT_HPO_ID,
                     resourceId='o123456'))
    organization_dao.insert(
        Organization(externalId='AARDVARK_ORG',
                     displayName='Aardvarks Rock',
                     hpoId=PITT_HPO_ID,
                     resourceId='o123457'))

    site_dao.insert(
        Site(siteName='Site 1',
             googleGroup='hpo-site-1',
             mayolinkClientNumber=123456,
             organizationId=org_1.organizationId,
             siteStatus=SiteStatus.ACTIVE,
             enrollingStatus=EnrollingStatus.ACTIVE,
             launchDate=datetime.datetime(2016, 1, 1),
             notes='notes',
             latitude=12.1,
             longitude=13.1,
             directions='directions',
             physicalLocationName='locationName',
             address1='address1',
             address2='address2',
             city='Austin',
             state='TX',
             zipCode='78751',
             phoneNumber='555-555-5555',
             adminEmails='[email protected], [email protected]',
             link='http://www.example.com'))
    site_dao.insert(
        Site(siteName='Zebras Rock',
             googleGroup='aaaaaaa',
             organizationId=org_1.organizationId,
             enrollingStatus=EnrollingStatus.INACTIVE,
             siteStatus=SiteStatus.INACTIVE))
def generate_site(self):
    """Return the Site descriptor for pt.sjtu.edu.cn (putao).

    Username/password come from config (password stored encrypted); login is
    verified by finding the username via the given CSS selector.
    """
    putao = Site()
    putao.home_page = "https://pt.sjtu.edu.cn/torrents.php"
    putao.login_page = "https://pt.sjtu.edu.cn/takelogin.php"
    putao.login_needed = True
    putao.login_headers = {
        "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/49.0.2623.112 Safari/537.36",
        "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
        "Accept-Language": "en-US,en;q=0.8,zh-CN;q=0.6,zh;q=0.4,zh-TW;q=0.2",
        "Content-Type": "application/x-www-form-urlencoded"
    }
    putao.login_verify_css_selector = "#userbar span.nobr a b"
    putao.login_verify_str = Config.get("putao_username")
    putao.login_username = Config.get("putao_username")
    putao.login_password = Enigma.decrypt(Config.get("putao_password"))
    return putao
def new_form_comment():
    """Flask endpoint: accept a comment from an HTML form POST.

    Validates the site token, applies honeypot and empty-field spam checks,
    normalizes the author's site URL, stores the comment unpublished, then
    redirects.  Any failure results in HTTP 400.
    """
    try:
        data = request.form
        logger.info('form data ' + str(data))

        # validate token: retrieve site entity
        token = data.get('token', '')
        site = Site.select().where(Site.token == token).get()
        if site is None:
            # NOTE(review): peewee's .get() raises DoesNotExist instead of
            # returning None; the outer except covers unknown tokens.
            logger.warn('Unknown site %s' % token)
            abort(400)

        # honeypot for spammers: the visible 'remarque' field must stay empty
        captcha = data.get('remarque', '')
        if captcha:
            logger.warn('discard spam: data %s' % data)
            abort(400)

        url = data.get('url', '')
        author_name = data.get('author', '').strip()
        author_gravatar = data.get('email', '').strip()
        author_site = data.get('site', '').lower().strip()
        # prefix scheme-less author sites so they render as working links
        if author_site and author_site[:4] != 'http':
            author_site = 'http://' + author_site
        message = data.get('message', '')

        # anti-spam again: all three fields are mandatory
        if not url or not author_name or not message:
            logger.warn('empty field: data %s' % data)
            abort(400)
        check_form_data(data)

        # add a row to Comment table; notified/published stay NULL until the
        # moderation/notification pipeline processes the comment
        created = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        comment = Comment(
            site=site,
            url=url,
            author_name=author_name,
            author_site=author_site,
            author_gravatar=author_gravatar,
            content=message,
            created=created,
            notified=None,
            published=None,
        )
        comment.save()
    except:
        # NOTE(review): bare except also swallows the abort(400) calls above
        # before re-aborting with the same status
        logger.exception('new comment failure')
        abort(400)

    return redirect('/redirect/', code=302)
def _setup_unset_enrollment_site(self):
    """Insert an org plus an inactive, UNSET-enrollment site for tests."""
    parent_org = OrganizationDao().insert(
        Organization(externalId='ORG_2',
                     displayName='Organization 2',
                     hpoId=PITT_HPO_ID))
    unset_site = Site(siteName='not enrolling site',
                      googleGroup='not_enrolling_dot_com',
                      organizationId=parent_org.organizationId,
                      enrollingStatus=EnrollingStatus.UNSET,
                      siteStatus=SiteStatus.INACTIVE)
    SiteDao().insert(unset_site)
def _setup_hpos(self):
    """Insert the UNSET and PITT HPOs plus two Pittsburgh-consortium sites.

    Records ``self.hpo_id`` (PITT) and ``self.site_id`` (Monroeville) for
    use by the tests.
    """
    hpo_dao = HPODao()
    hpo_dao.insert(HPO(hpoId=UNSET_HPO_ID, name='UNSET'))
    hpo_dao.insert(
        HPO(hpoId=PITT_HPO_ID, name='PITT',
            organizationType=OrganizationType.HPO))
    self.hpo_id = PITT_HPO_ID

    site_dao = SiteDao()
    created_site = site_dao.insert(
        Site(siteName='Monroeville Urgent Care Center',
             googleGroup='hpo-site-monroeville',
             consortiumName='Pittsburgh',
             mayolinkClientNumber=7035769,
             hpoId=PITT_HPO_ID))
    self.site_id = created_site.siteId
    site_dao.insert(
        Site(siteName='Phoenix Urgent Care Center',
             googleGroup='hpo-site-bannerphoenix',
             consortiumName='Pittsburgh',
             mayolinkClientNumber=7035770,
             hpoId=PITT_HPO_ID))
def test_schema_biobank_order_and_datetime_roundtrip(self):
    """A BiobankOrder written with a tz-aware datetime reads back in UTC.

    Builds a participant, a site, and a fully-populated order (identifiers
    and one ordered sample), commits, then verifies the stored `created`
    timestamp equals the original converted to UTC with tzinfo stripped.
    """
    bo_id = 1
    now = isodate.parse_datetime('2016-01-04T10:28:50-04:00')

    write_session = self.database.make_session()
    p = self._create_participant(write_session)
    # site must exist first so the order's *SiteId=1 foreign keys resolve
    site = Site(siteId=1,
                siteName='site',
                googleGroup='*****@*****.**',
                mayolinkClientNumber=12345,
                hpoId=1)
    write_session.add(site)
    write_session.commit()

    bo = BiobankOrder(biobankOrderId=bo_id,
                      participantId=p.participantId,
                      created=now,
                      sourceSiteId=1,
                      version=1,
                      sourceUsername='******',
                      collectedSiteId=1,
                      collectedUsername='******',
                      processedSiteId=1,
                      processedUsername='******',
                      finalizedSiteId=1,
                      finalizedUsername='******',
                      collectedNote=r'written by ' + self.fake.last_name(),
                      processedNote=u'd',
                      finalizedNote=u'e',
                      logPosition=LogPosition())
    bo.identifiers.append(BiobankOrderIdentifier(system='a', value='b'))
    bo.samples.append(
        BiobankOrderedSample(test='a',
                             description=u'a test invented by ' +
                             self.fake.first_name(),
                             processingRequired=True,
                             collected=now,
                             processed=now,
                             finalized=now))
    write_session.add(bo)
    write_session.commit()

    # read back through a fresh session to exercise real (de)serialization
    read_session = self.database.make_session()
    bo = read_session.query(BiobankOrder).get(bo_id)
    # stored values are naive UTC, hence the astimezone + tzinfo strip
    self.assertEquals(
        bo.created.isoformat(),
        now.astimezone(tzutc()).replace(tzinfo=None).isoformat())
def get_products(self):
    """Build one Product per config section, with that section's sites.

    Each key in a section whose upper-cased name matches a SiteType value
    becomes a Site; the section's spreadsheet id and its sites form a
    Product.

    :return: list of Product objects (fix: the original built the list but
        never returned it)
    """
    products = []
    for section in self.config.sections():
        configs = dict(self.config.items(section))
        # Fix: sites_list was previously created once, outside this loop, so
        # every product silently accumulated the sites of all earlier
        # sections; each product must only carry its own section's sites.
        sites_list = []
        for c in configs:
            key = c.upper()
            if key in SiteType._value2member_map_:
                site_type = SiteType(c.upper())
                site = Site(site_type, configs.get(c))
                sites_list.append(site)
        products.append(
            Product(section, configs.get(StringConstants.SPREADSHEET_ID),
                    sites_list))
    return products
def _site_from_row(row, hpo_dao):
    """Build a Site model from one import-sheet row.

    :param row: dict-like row with 'HPO Site ID', 'MayoLINK Client #',
        'Google Group Email Address', 'Group (Consortium)' and 'Site' keys
    :param hpo_dao: DAO used to resolve the HPO name to an id
    :return: an unsaved Site, or None when the HPO or google group is invalid
        (the problem is logged and the row skipped)
    """
    hpo_name = row['HPO Site ID']
    hpo = hpo_dao.get_by_name(hpo_name)
    if not hpo:
        logging.error('Invalid HPO %r; skipping %s.', hpo_name, row)
        return None
    mayolink_client_num_str = row['MayoLINK Client #']
    google_group = row['Google Group Email Address']
    if not google_group.endswith(_GOOGLE_GROUP_SUFFIX):
        logging.error(
            'Invalid google group: %r does not end with %r; skipping %s.',
            google_group, _GOOGLE_GROUP_SUFFIX, row)
        return None
    # store only the lower-cased group name, with the domain suffix removed
    google_group_prefix = google_group[:-len(_GOOGLE_GROUP_SUFFIX)].lower()
    consortium = row['Group (Consortium)']
    return Site(consortiumName=consortium,
                siteName=row['Site'],
                # the client number column may be blank
                mayolinkClientNumber=(int(mayolink_client_num_str)
                                      if mayolink_client_num_str else None),
                googleGroup=google_group_prefix,
                hpoId=hpo.hpoId)
class NormalAlert(Login):
    """Crawler/auto-downloader for M-Team (kp.m-team.cc) torrents.

    Logs in, scrapes the torrent listing, scores/filters seeds by the
    configured strategy (easy/medium/hard) and hands the chosen ones to
    SeedManager, caching results to avoid re-adding.
    """

    # NOTE(review): class-level mutable state — `site` (and `is_login`) are
    # shared across all instances; __init__ mutates the shared Site object.
    site = Site()
    size_factor = 1.074  # the shown size on the web page is not accurate
    is_login = False
    download_link = "https://kp.m-team.cc/download.php?id=%s&https=1"

    def __init__(self):
        self.site.home_page = "https://kp.m-team.cc/torrents.php"
        self.site.login_page = "https://kp.m-team.cc/takelogin.php"
        # NOTE(review): the cookie below carries live cf_clearance/tp tokens;
        # they expire and should be sourced from config, not source control.
        self.site.login_headers = {
            "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9",
            "accept-language": "en-US,en;q=0.9,zh-CN;q=0.8,zh;q=0.7,zh-TW;q=0.6,ja;q=0.5",
            "cookie": "c_lang_folder=cht; tp=ZTNiZTE4ZDYwYmZiOTI1ZjQzNGRmMDhlOTY4NTJmODExZjYwODIxZQ%3D%3D; cf_chl_prog=a9; cf_clearance=IMq1zdVD.N7tjymYWcRihWCPKFdgOpx65I4YP.VXmcI-1630648319-0-150",
            "dnt": "1",
            "referer": "https://kp.m-team.cc/index.php",
            "sec-ch-ua": "\"Google Chrome\";v=\"93\", \" Not;A Brand\";v=\"99\", \"Chromium\";v=\"93\"",
            "sec-ch-ua-mobile": "?0",
            "sec-ch-ua-platform": "\"macOS\"",
            "sec-fetch-dest": "document",
            "sec-fetch-mode": "navigate",
            "sec-fetch-site": "same-origin",
            "sec-fetch-user": "******",
            "upgrade-insecure-requests": "1",
            "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4577.63 Safari/537.36"
        }
        self.site.login_needed = True
        self.site.login_verify_css_selector = "#info_block span.nowrap a b"
        self.site.login_verify_str = Config.get("mteam_username")
        self.site.login_username = Config.get("mteam_username")
        self.site.login_password = Config.get("mteam_password")

    def login_if_not(self):
        # lazy login; asserts on failure so later scraping never runs unauthenticated
        if not self.is_login:
            self.generate_site()
            self.is_login = self.login(self.site)
            assert self.is_login

    def generate_site(self):
        # the descriptor is pre-built in __init__; nothing to generate
        return self.site

    def build_post_data(self, site):
        # form fields for the takelogin.php POST
        data = dict()
        data['username'] = site.login_username
        data['password'] = site.login_password
        return data

    def crawl(self, print_log=True):
        """Fetch and parse the torrent listing; returns a list of SeedInfo."""
        self.login_if_not()
        soup_obj = HttpUtils.get(self.site.home_page,
                                 headers=self.site.login_headers)
        seeds = self.parse(soup_obj)
        if print_log:
            for seed in seeds:
                print(seed)
        return seeds

    def parse(self, soup_obj):
        """Extract SeedInfo objects from the listing page soup."""
        assert soup_obj is not None
        tr_list = soup_obj.select("table.torrents tr")
        seeds = []
        cnt = 0
        for tr in tr_list:
            cnt += 1
            if cnt == 1:
                # skip the caption tr
                continue
            seed = SeedInfo()
            td_list = tr.select("td.rowfollow")
            if len(td_list) < 9:
                # skip embedded contents
                continue
            seed.since = HttpUtils.get_content(td_list[2], "span")
            seed.size = float(self.parse_size(td_list[3]))
            seed.upload_num = int(self.clean_tag(td_list[4]))
            seed.download_num = int(self.clean_tag(td_list[5]))
            seed.finish_num = int(self.clean_tag(td_list[6]))
            seed.done = self.clean_tag(td_list[7])
            seed.working = "peer-active" in td_list[7]['class']
            td_title = tr.select("td.torrenttr tr td")
            # sticky is stored as a count (0/1), later used as a truthy flag
            seed.sticky = len(td_title[0].select("img[alt=\"Sticky\"]"))
            seed.title = td_title[0].select("a")[0]["title"]
            seed.url = td_title[0].select("a")[0]['href']
            seed.free = len(td_title[0].select("img[alt=\"Free\"]")) > 0
            seed.hot = len(td_title[0].select("font.hot")) > 0
            # discount = remaining download cost in percent (free == 0)
            if len(td_title[0].select("img[alt=\"50%\"]")) > 0:
                seed.discount = 50
            elif len(td_title[0].select("img[alt=\"30%\"]")) > 0:
                seed.discount = 30
            elif seed.free:
                seed.discount = 0
            else:
                seed.discount = 100
            seed.id = self.parse_id(seed.url)
            seeds.append(seed)
        print("Crawl: " + str(len(seeds)))
        if len(seeds) < 10:
            # suspiciously few rows usually means the page layout changed
            EmailSender.send(u"无法解析页面", Config.get("mteam_username"))
        return seeds

    def parse_size(self, soup_obj):
        """Convert a size cell to MB, correcting by size_factor."""
        assert soup_obj is not None
        assert len(soup_obj.contents) == 3
        size_num = round(float(soup_obj.contents[0]) * self.size_factor, 2)
        size_unit = soup_obj.contents[2]
        return HttpUtils.pretty_format(str(size_num) + str(size_unit), "MB")

    @staticmethod
    def clean_tag(soup_obj):
        """Strip markup/commas from a numeric cell, returning the digits."""
        assert soup_obj is not None
        html = str(soup_obj.contents[0])
        html = html.replace(',', '')
        m = re.search(">(\d+\.*\d?)<", html)
        if m:
            ret = m.group(1)
        else:
            ret = html
        return ret

    @staticmethod
    def parse_id(url):
        """Extract the numeric torrent id from a details URL."""
        m = re.search("id=(\d+)&", url)
        assert m is not None
        return m.group(1)

    def filter(self, data):
        """Select seeds worth downloading, per config strategy + white list."""
        # NOTE(review): shadows the builtin `filter` as a method name.
        # common strategy
        # 1. hasn't been found before
        # 2. not exceed max size
        max_size = Config.get("seed_max_size_mb")
        data = list(
            filter(lambda x: x.size < max_size and Cache().get(x.id) is None,
                   data))
        # customized strategy
        final_seeds = []
        if Config.get("mteam_strategy") == "easy":
            final_seeds = self.easy_strategy(data)
        elif Config.get("mteam_strategy") == "medium":
            final_seeds = self.medium_strategy(data)
        elif Config.get("mteam_strategy") == "hard":
            final_seeds = self.hard_strategy(data)
        # white list
        # NOTE(review): reads "putao_white_list" although this class targets
        # M-Team — possibly copied from the putao crawler; confirm the key.
        white_lists = Config.get("putao_white_list").split("|")
        for seed in data:
            for white_list in white_lists:
                if re.search(white_list, seed.title):
                    final_seeds.append(seed)
                    break
        for seed in final_seeds:
            print("Find valuable seed: " + str(seed))
        return final_seeds

    def sort_seed(self, seeds):
        # sort seed: sticky and free seeds have highest weight; the less
        # discount, the more download, the less upload, the less size,
        # the better
        for x in seeds:
            print("score=" + str(
                int(x.sticky) * 50 + int(x.free) * 50 + round(
                    (100000 / (x.discount + 10)) * x.download_num /
                    (x.upload_num + 0.01) / (x.size + 5000), 3)) + " >>>> " +
                  str(x))
        seeds.sort(key=lambda x: int(x.sticky) * 50 + int(x.free) * 50 + round(
            (100000 / (x.discount + 10)) * x.download_num /
            (x.upload_num + 0.01) / (x.size + 5000), 3),
                   reverse=True)
        return seeds

    # do not add too many seeds at one time
    def limit_total_size(self, seeds, limit):
        """Keep seeds (in order) until the cumulative size reaches `limit` MB."""
        size_cnt = 0
        filtered_seeds = []
        for seed in seeds:
            if seed.size > limit:
                continue
            size_cnt += seed.size
            if size_cnt < limit:
                filtered_seeds.append(seed)
        return filtered_seeds

    def easy_strategy(self, data):
        """Most permissive pick: demand/supply ratio >= 1.5 plus discounts."""
        filtered_seeds = list(
            filter(
                lambda x: (x.upload_num != 0 and round(
                    x.download_num / x.upload_num, 1) >= 1.5) and
                (x.free or x.sticky or (x.discount <= 50 and round(
                    x.download_num / x.upload_num) >= 2) or
                 (x.discount > 50 and round(x.download_num / x.upload_num) >=
                  3 and x.upload_num <= 10)), data))
        filtered_seeds = self.sort_seed(filtered_seeds)
        final_seeds = self.limit_total_size(filtered_seeds, 12 * 1024)
        return final_seeds

    def medium_strategy(self, data):
        """Stricter pick: ratio >= 2, and cap the non-discounted seeds."""
        filtered_seeds = list(
            filter(
                lambda x: (x.upload_num != 0 and round(
                    x.download_num / x.upload_num, 1) >= 2) and
                (x.free or (x.sticky and x.discount <= 50) or
                 (x.discount <= 50 and round(x.download_num / x.upload_num) >=
                  2) or
                 ((x.discount > 50 and round(x.download_num / x.upload_num) >=
                   3 and x.upload_num <= 10))), data))
        filtered_seeds = self.sort_seed(filtered_seeds)
        # only a limited number of no-discount seeds is allowed
        not_free_limit = 1
        not_free_cnt = 0
        filtered_seeds_lvl2 = []
        for seed in filtered_seeds:
            if not seed.free and not seed.sticky and seed.discount > 50 and not_free_cnt < not_free_limit:
                filtered_seeds_lvl2.append(seed)
            elif seed.free or seed.sticky or seed.discount <= 50:
                filtered_seeds_lvl2.append(seed)
        final_seeds = self.limit_total_size(filtered_seeds_lvl2, 10 * 1024)
        return final_seeds

    def hard_strategy(self, data):
        """Strictest pick: ratio >= 3 and meaningful discount only."""
        filtered_seeds = list(
            filter(
                lambda x: (x.upload_num != 0 and round(
                    x.download_num / x.upload_num, 1) >= 3) and
                (x.free or (x.sticky and x.discount <= 50) or
                 (x.discount <= 50 and round(
                     x.download_num / x.upload_num) >= 5)), data))
        filtered_seeds = self.sort_seed(filtered_seeds)
        final_seeds = self.limit_total_size(filtered_seeds, 9 * 1024)
        return final_seeds

    # general download way for both normal user and warned user
    def download_seed_file(self, seed_id):
        """Download one .torrent file to the working directory."""
        self.login_if_not()
        data = {
            "id": seed_id,
            "type": "ratio",
            "hidenotice": "1",
            "letmedown": "ratio"
        }
        res = HttpUtils.post("https://kp.m-team.cc/downloadnotice.php?",
                             data=data,
                             headers=self.site.login_headers,
                             returnRaw=True)
        try:
            with open("%s.torrent" % seed_id, "wb") as f:
                f.write(res.content)
        except Exception as e:
            print("Cannot download seed file: " + seed_id, e)

    def action(self, candidate_seeds):
        """Download candidates, hand them to SeedManager and cache results."""
        if len(candidate_seeds) == 0:
            return
        for seed in candidate_seeds:
            self.download_seed_file(seed.id)
        success_seeds, fail_seeds = SeedManager.try_add_seeds(candidate_seeds)
        for success_seed in success_seeds:
            Cache().set_with_expire(success_seed.id, str(success_seed),
                                    5 * 864000)
        # make the failed seed cool down for some time
        for fail_seed in fail_seeds:
            cool_down_time = 3600  # 1 hour
            if fail_seed.free or fail_seed.sticky:
                cool_down_time = 300  # 5 minutes
            elif fail_seed.discount <= 50:
                cool_down_time = 1800  # 30 minutes
            Cache().set_with_expire(fail_seed.id, str(fail_seed),
                                    cool_down_time)

    def check(self):
        # one crawl-filter-act cycle, without per-seed logging
        self.action(self.filter(self.crawl(False)))

    def init(self):
        # enable adult torrent and close pic
        self.init_setting()
        # # crawl and add to cache
        # seeds = self.crawl()
        #
        # # common strategy
        # # 1. hasn't been found before
        # # 2. not exceed max size
        # max_size = Config.get("seed_max_size_mb")
        # seeds = list(filter(lambda x: x.size < max_size and Cache().get(x.id) is None, seeds))
        #
        # for seed in seeds:
        #     print("Add seed: " + str(seed))
        #     Cache().set_with_expire(seed.id, str(seed), 5 * 864000)

    def add_seed(self, seed_id):
        """Manually download and register a single seed by id."""
        self.login_if_not()
        self.download_seed_file(seed_id)
        seeds = list(filter(lambda x: x.id == seed_id, self.crawl(False)))
        assert len(seeds) == 1
        SeedManager.try_add_seeds(seeds)
        Cache().set_with_expire(seeds[0].id, str(seeds[0]), 5 * 864000)

    def init_setting(self):
        """Push profile settings: enable adult torrents, hide pictures."""
        self.login_if_not()
        # enable adult torrent and plain-http torrent pages
        setting_url = "https://kp.m-team.cc/usercp.php"
        lab_data = {
            "action": "laboratory",
            "type": "save",
            "laboratory_adult_mode": "0",
            "laboratory_torrent_page_https": "0"
        }
        res = HttpUtils.post(url=setting_url,
                             data=lab_data,
                             headers=self.site.login_headers,
                             returnRaw=True)
        assert res.status_code == 200
        # do not show picture
        tracker_data = {
            "action": "tracker",
            "type": "save",
            "t_look": "1",  # show pic
            "tooltip": "off",
            "timetype": "timealive",
            "appendsticky": "yes",
            "radio": "icon",
            "smalldescr": "yes",
            "dlicon": "yes",
            "bmicon": "yes",
            "show_hot": "yes",
            "showfb": "yes",
            "showdescription": "yes",
            "showimdb": "yes",
            "showcomment": "yes",
            "appendnew": "yes",
            "appendpicked": "yes",
            "showcomnum": "yes"
        }
        res = HttpUtils.post(url=setting_url,
                             data=tracker_data,
                             headers=self.site.login_headers,
                             returnRaw=True)
        assert res.status_code == 200
def _update_site(self, hierarchy_org_obj):
    """Create or update a Site from an organization-hierarchy FHIR payload.

    Extracts the site's identifiers, status flags, launch date, address and
    contact info from `hierarchy_org_obj`, resolves its parent organization,
    then inserts a new Site row or updates the existing one (matched by
    google group). Finally pushes the change to BigQuery.

    :raises BadRequest: on any missing/invalid field or when an ACTIVE site
        has no geocoding.
    """
    if hierarchy_org_obj.id is None:
        raise BadRequest('No id found in payload data.')
    google_group = self._get_value_from_identifier(
        hierarchy_org_obj, HIERARCHY_CONTENT_SYSTEM_PREFIX + 'site-id')
    if google_group is None:
        raise BadRequest(
            'No organization-identifier info found in payload data.')
    google_group = google_group.lower()
    # An inactive payload marks the site obsolete; active sites carry None.
    is_obsolete = ObsoleteStatus(
        'OBSOLETE') if not hierarchy_org_obj.active else None
    resource_id = self._get_reference(hierarchy_org_obj)
    organization = self.organization_dao.get_by_resource_id(resource_id)
    if organization is None:
        raise BadRequest(
            'Invalid partOf reference {} importing site {}'.format(
                resource_id, google_group))
    launch_date = None
    launch_date_str = self._get_value_from_extention(
        hierarchy_org_obj,
        HIERARCHY_CONTENT_SYSTEM_PREFIX + 'anticipatedLaunchDate',
        'valueDate')
    if launch_date_str:
        try:
            launch_date = parse(launch_date_str).date()
        except ValueError:
            raise BadRequest('Invalid launch date {} for site {}'.format(
                launch_date_str, google_group))
    name = hierarchy_org_obj.name
    mayolink_client_number = None
    mayolink_client_number_str = self._get_value_from_identifier(
        hierarchy_org_obj,
        HIERARCHY_CONTENT_SYSTEM_PREFIX + 'mayo-link-identifier')
    if mayolink_client_number_str:
        try:
            mayolink_client_number = int(mayolink_client_number_str)
        except ValueError:
            raise BadRequest(
                'Invalid Mayolink Client # {} for site {}'.format(
                    mayolink_client_number_str, google_group))
    notes = self._get_value_from_extention(
        hierarchy_org_obj, HIERARCHY_CONTENT_SYSTEM_PREFIX + 'notes')
    site_status_bool = self._get_value_from_extention(
        hierarchy_org_obj,
        HIERARCHY_CONTENT_SYSTEM_PREFIX + 'schedulingStatusActive',
        'valueBoolean')
    try:
        site_status = SiteStatus(
            'ACTIVE' if site_status_bool else 'INACTIVE')
    except TypeError:
        # BUG FIX: the original formatted `site_status` here, but it is
        # unbound when the constructor raises, so a NameError masked the
        # BadRequest. Report the raw boolean value instead, matching the
        # enrolling/digital-scheduling handlers below.
        raise BadRequest('Invalid site status {} for site {}'.format(
            site_status_bool, google_group))
    enrolling_status_bool = self._get_value_from_extention(
        hierarchy_org_obj,
        HIERARCHY_CONTENT_SYSTEM_PREFIX + 'enrollmentStatusActive',
        'valueBoolean')
    try:
        enrolling_status = EnrollingStatus(
            'ACTIVE' if enrolling_status_bool else 'INACTIVE')
    except TypeError:
        raise BadRequest(
            'Invalid enrollment site status {} for site {}'.format(
                enrolling_status_bool, google_group))
    digital_scheduling_bool = self._get_value_from_extention(
        hierarchy_org_obj,
        HIERARCHY_CONTENT_SYSTEM_PREFIX + 'digitalSchedulingStatusActive',
        'valueBoolean')
    try:
        digital_scheduling_status = DigitalSchedulingStatus(
            'ACTIVE' if digital_scheduling_bool else 'INACTIVE')
    except TypeError:
        raise BadRequest(
            'Invalid digital scheduling status {} for site {}'.format(
                digital_scheduling_bool, google_group))
    directions = self._get_value_from_extention(
        hierarchy_org_obj, HIERARCHY_CONTENT_SYSTEM_PREFIX + 'directions')
    physical_location_name = self._get_value_from_extention(
        hierarchy_org_obj, HIERARCHY_CONTENT_SYSTEM_PREFIX + 'locationName')
    address_1, address_2, city, state, zip_code = self._get_address(
        hierarchy_org_obj)
    phone = self._get_contact_point(hierarchy_org_obj, 'phone')
    admin_email_addresses = self._get_contact_point(
        hierarchy_org_obj, 'email')
    link = self._get_contact_point(hierarchy_org_obj, 'url')
    schedule_instructions = self._get_value_from_extention(
        hierarchy_org_obj,
        HIERARCHY_CONTENT_SYSTEM_PREFIX + 'schedulingInstructions')
    entity = Site(siteName=name,
                  googleGroup=google_group,
                  mayolinkClientNumber=mayolink_client_number,
                  organizationId=organization.organizationId,
                  hpoId=organization.hpoId,
                  siteStatus=site_status,
                  enrollingStatus=enrolling_status,
                  digitalSchedulingStatus=digital_scheduling_status,
                  scheduleInstructions=schedule_instructions,
                  scheduleInstructions_ES='',
                  launchDate=launch_date,
                  notes=notes,
                  notes_ES='',
                  directions=directions,
                  physicalLocationName=physical_location_name,
                  address1=address_1,
                  address2=address_2,
                  city=city,
                  state=state,
                  zipCode=zip_code,
                  phoneNumber=phone,
                  adminEmails=admin_email_addresses,
                  link=link,
                  isObsolete=is_obsolete,
                  resourceId=hierarchy_org_obj.id)
    # Match against existing sites by google group to decide insert vs update.
    existing_map = {
        entity.googleGroup: entity
        for entity in self.site_dao.get_all()
    }
    existing_entity = existing_map.get(entity.googleGroup)
    with self.site_dao.session() as session:
        if existing_entity:
            # Carry forward geocoding/time zone from the existing row.
            self._populate_lat_lng_and_time_zone(entity, existing_entity)
            if entity.siteStatus == SiteStatus.ACTIVE and \
                (entity.latitude is None or entity.longitude is None):
                raise BadRequest(
                    'Active site without geocoding: {}'.format(
                        entity.googleGroup))
            # Compare everything except the primary key to avoid no-op writes.
            new_dict = entity.asdict()
            new_dict['siteId'] = None
            existing_dict = existing_entity.asdict()
            existing_dict['siteId'] = None
            if existing_dict == new_dict:
                logging.info('Not updating {}.'.format(
                    new_dict['googleGroup']))
            else:
                for k, v in entity.asdict().iteritems():
                    if k != 'siteId' and k != 'googleGroup':
                        setattr(existing_entity, k, v)
                self.site_dao.update_with_session(session, existing_entity)
        else:
            self._populate_lat_lng_and_time_zone(entity, None)
            if entity.siteStatus == SiteStatus.ACTIVE and \
                (entity.latitude is None or entity.longitude is None):
                raise BadRequest(
                    'Active site without geocoding: {}'.format(
                        entity.googleGroup))
            self.site_dao.insert_with_session(session, entity)
    site_id = self.site_dao.get_by_google_group(google_group).siteId
    # Mirror the change into the BigQuery site table.
    bq_site_update_by_id(site_id)
def _entity_from_row(self, row):
    """Build a Site entity from one CSV import row, or None on bad data.

    Validates the organization reference, launch date, Mayolink client
    number, and the environment-specific status columns. Every validation
    failure is logged, appended to ``self.errors``, and makes the row be
    skipped by returning None.
    """
    google_group = row[SITE_SITE_ID_COLUMN].lower()
    organization = self.organization_dao.get_by_external_id(
        row[SITE_ORGANIZATION_ID_COLUMN].upper())
    if organization is None:
        logging.warn('Invalid organization ID %s importing site %s',
                     row[SITE_ORGANIZATION_ID_COLUMN].upper(), google_group)
        self.errors.append(
            'Invalid organization ID {} importing site {}'.format(
                row[SITE_ORGANIZATION_ID_COLUMN].upper(), google_group))
        return None
    launch_date = None
    launch_date_str = row.get(SITE_LAUNCH_DATE_COLUMN)
    if launch_date_str:
        try:
            launch_date = parse(launch_date_str).date()
        except ValueError:
            logging.warn('Invalid launch date %s for site %s',
                         launch_date_str, google_group)
            self.errors.append('Invalid launch date {} for site {}'.format(
                launch_date_str, google_group))
            return None
    name = row[SITE_SITE_COLUMN]
    mayolink_client_number = None
    mayolink_client_number_str = row.get(
        SITE_MAYOLINK_CLIENT_NUMBER_COLUMN)
    if mayolink_client_number_str:
        try:
            mayolink_client_number = int(mayolink_client_number_str)
        except ValueError:
            logging.warn('Invalid Mayolink Client # %s for site %s',
                         mayolink_client_number_str, google_group)
            self.errors.append(
                'Invalid Mayolink Client # {} for site {}'.format(
                    mayolink_client_number_str, google_group))
            return None
    notes = row.get(SITE_NOTES_COLUMN)
    notes_es = row.get(SITE_NOTES_COLUMN_ES)
    try:
        site_status = SiteStatus(row[SITE_STATUS_COLUMN +
                                     self.environment].upper())
    except TypeError:
        logging.warn('Invalid site status %s for site %s',
                     row[SITE_STATUS_COLUMN + self.environment],
                     google_group)
        self.errors.append('Invalid site status {} for site {}'.format(
            row[SITE_STATUS_COLUMN + self.environment], google_group))
        return None
    try:
        enrolling_status = EnrollingStatus(row[ENROLLING_STATUS_COLUMN +
                                               self.environment].upper())
    except TypeError:
        logging.warn('Invalid enrollment site status %s for site %s',
                     row[ENROLLING_STATUS_COLUMN + self.environment],
                     google_group)
        self.errors.append(
            'Invalid enrollment site status {} for site {}'.format(
                row[ENROLLING_STATUS_COLUMN + self.environment],
                google_group))
        # BUG FIX: the original fell through here without returning, so
        # `enrolling_status` was unbound and the Site(...) below raised
        # NameError. Skip the row like every other validation failure.
        return None
    directions = row.get(SITE_DIRECTIONS_COLUMN)
    physical_location_name = row.get(SITE_PHYSICAL_LOCATION_NAME_COLUMN)
    address_1 = row.get(SITE_ADDRESS_1_COLUMN)
    address_2 = row.get(SITE_ADDRESS_2_COLUMN)
    city = row.get(SITE_CITY_COLUMN)
    state = row.get(SITE_STATE_COLUMN)
    zip_code = row.get(SITE_ZIP_COLUMN)
    phone = row.get(SITE_PHONE_COLUMN)
    admin_email_addresses = row.get(SITE_ADMIN_EMAIL_ADDRESSES_COLUMN)
    link = row.get(SITE_LINK_COLUMN)
    # NOTE(review): unlike the statuses above, a TypeError here is not
    # caught and will propagate — confirm whether that is intentional.
    digital_scheduling_status = DigitalSchedulingStatus(
        row[DIGITAL_SCHEDULING_STATUS_COLUMN + self.environment].upper())
    schedule_instructions = row.get(SCHEDULING_INSTRUCTIONS)
    schedule_instructions_es = row.get(SCHEDULING_INSTRUCTIONS_ES)
    return Site(siteName=name,
                googleGroup=google_group,
                mayolinkClientNumber=mayolink_client_number,
                organizationId=organization.organizationId,
                hpoId=organization.hpoId,
                siteStatus=site_status,
                enrollingStatus=enrolling_status,
                digitalSchedulingStatus=digital_scheduling_status,
                scheduleInstructions=schedule_instructions,
                scheduleInstructions_ES=schedule_instructions_es,
                launchDate=launch_date,
                notes=notes,
                notes_ES=notes_es,
                directions=directions,
                physicalLocationName=physical_location_name,
                address1=address_1,
                address2=address_2,
                city=city,
                state=state,
                zipCode=zip_code,
                phoneNumber=phone,
                adminEmails=admin_email_addresses,
                link=link)
def generate_site(self): site = Site() site.home_page = "https://totheglory.im/browse.php?c=M" site.login_page = "https://totheglory.im/takelogin.php" # encoding/compression is disabled site.login_headers = { "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/62.0.3202.94 Safari/537.36", "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8", # "accept-encoding": "gzip, deflate, br", "accept-language": "en-US,en;q=0.9,zh-CN;q=0.8,zh;q=0.7,zh-TW;q=0.6,ja;q=0.5", "dnt": "1", "origin": "https://totheglory.im", "referer": "https://totheglory.im/login.php?returnto=", "upgrade-insecure-requests": "1", "cache-control": "max-age=0", "cookie": "__cfduid=d38205529894764cfe4019d151d9c5ba41512649384; cf_clearance=df03bb3d6d563412e8c8fc778a67f4725aa522e1-1512649390-7200" } site.login_needed = True site.login_verify_css_selector = "table td span.smallfont b a" site.login_verify_str = Config.get(self.get_site_name() + "_username") site.login_username = site.login_verify_str site.login_password = Config.get(self.get_site_name() + "_password") site.stat_page = "https://totheglory.im/mybonus.php" self.site = site
def index(self): self.site = Site.all().get() pass
def generate_all(): for site in Site.select(): generate_site(site.token)
def _create_site(self, id_, google_group): site = Site(siteId=id_, siteName=id_, googleGroup=google_group) self.site_dao.insert(site) return site
def show(self): self.site = Site.all().get() self.category = db.GqlQuery("SELECT * FROM Category WHERE category_id = :1",self.params.get('id')).get()
def test_schema(self):
    """Smoke-test the full database schema by inserting one row (or a small
    graph of rows) into every table, committing in dependency order.

    Any schema/mapping error will surface as an exception from add/commit.
    """
    session = self.database.make_session()

    # HPO and code book are roots with no foreign keys; insert them first.
    hpo = HPO(hpoId=1,
              name='UNSET',
              displayName='No organization set',
              organizationType=OrganizationType.UNSET)
    code_book = CodeBook(codeBookId=1,
                         created=datetime.datetime.now(),
                         latest=True,
                         name="pmi",
                         system="http://foo/bar",
                         version="v0.1.1")
    session.add(hpo)
    session.add(code_book)
    session.commit()

    # Organization depends on the HPO inserted above.
    organization = Organization(organizationId=1,
                                externalId='org',
                                displayName='Organization',
                                hpoId=1)
    session.add(organization)
    session.commit()

    # Site depends on the organization; codes depend on the code book.
    site = Site(siteId=1,
                siteName='site',
                googleGroup='*****@*****.**',
                mayolinkClientNumber=12345,
                organizationId=1)
    code1 = Code(codeId=1,
                 codeBookId=1,
                 system="a",
                 value="b",
                 shortValue="q",
                 display=u"c",
                 topic=u"d",
                 codeType=CodeType.MODULE,
                 mapped=True,
                 created=datetime.datetime.now())
    codeHistory1 = CodeHistory(codeId=1,
                               codeBookId=1,
                               system="a",
                               value="b",
                               shortValue="q",
                               display=u"c",
                               topic=u"d",
                               codeType=CodeType.MODULE,
                               mapped=True,
                               created=datetime.datetime.now())
    session.add(site)
    session.add(code1)
    session.add(codeHistory1)
    session.commit()

    # Child codes reference their parent via parentId, so commit in order.
    code2 = Code(codeId=2,
                 codeBookId=1,
                 parentId=1,
                 system="a",
                 value="c",
                 display=u"X",
                 topic=u"d",
                 codeType=CodeType.QUESTION,
                 mapped=True,
                 created=datetime.datetime.now())
    codeHistory2 = CodeHistory(codeId=2,
                               codeBookId=1,
                               parentId=1,
                               system="a",
                               value="c",
                               display=u"X",
                               topic=u"d",
                               codeType=CodeType.QUESTION,
                               mapped=True,
                               created=datetime.datetime.now())
    session.add(code2)
    session.add(codeHistory2)
    session.commit()

    code3 = Code(codeId=3,
                 codeBookId=1,
                 parentId=2,
                 system="a",
                 value="d",
                 display=u"Y",
                 topic=u"d",
                 codeType=CodeType.ANSWER,
                 mapped=False,
                 created=datetime.datetime.now())
    codeHistory3 = CodeHistory(codeId=3,
                               codeBookId=1,
                               parentId=2,
                               system="a",
                               value="d",
                               display=u"Y",
                               topic=u"d",
                               codeType=CodeType.ANSWER,
                               mapped=False,
                               created=datetime.datetime.now())
    session.add(code3)
    session.add(codeHistory3)
    session.commit()
    session.commit()

    # Participant plus its one-to-one summary and history rows.
    p = self._participant_with_defaults(
        participantId=1,
        version=1,
        biobankId=2,
        clientId='*****@*****.**',
        hpoId=hpo.hpoId,
        signUpTime=datetime.datetime.now(),
        lastModified=datetime.datetime.now())
    ps = self._participant_summary_with_defaults(
        participantId=1,
        biobankId=2,
        lastModified=datetime.datetime.now(),
        hpoId=hpo.hpoId,
        firstName=self.fake.first_name(),
        middleName=self.fake.first_name(),
        lastName=self.fake.last_name(),
        email=self.fake.email(),
        zipCode='78751',
        dateOfBirth=datetime.date.today(),
        genderIdentityId=1,
        consentForStudyEnrollment=QuestionnaireStatus.SUBMITTED,
        consentForStudyEnrollmentTime=datetime.datetime.now(),
        numBaselineSamplesArrived=2)
    p.participantSummary = ps
    session.add(p)
    ph = self._participant_history_with_defaults(
        participantId=1,
        biobankId=2,
        clientId='*****@*****.**',
        hpoId=hpo.hpoId,
        signUpTime=datetime.datetime.now(),
        lastModified=datetime.datetime.now())
    session.add(ph)
    session.commit()

    # Two stored samples for the same participant/test; only the sample id
    # must be unique.
    session.add(
        BiobankStoredSample(biobankStoredSampleId='WEB1234542',
                            biobankId=p.biobankId,
                            biobankOrderIdentifier='KIT',
                            test='1UR10',
                            confirmed=datetime.datetime.utcnow()))
    session.add(
        BiobankStoredSample(
            biobankStoredSampleId='WEB99999',  # Sample ID must be unique.
            biobankId=p.
            biobankId,  # Participant ID and test may be duplicated.
            biobankOrderIdentifier='KIT',
            test='1UR10',
            confirmed=datetime.datetime.utcnow()))

    # Physical measurements: pm2 amends pm, so pm must exist first.
    pm = PhysicalMeasurements(physicalMeasurementsId=1,
                              participantId=1,
                              created=datetime.datetime.now(),
                              resource='blah',
                              final=False,
                              logPosition=LogPosition())
    pm2 = PhysicalMeasurements(physicalMeasurementsId=2,
                               participantId=1,
                               created=datetime.datetime.now(),
                               resource='blah',
                               final=True,
                               amendedMeasurementsId=1,
                               logPosition=LogPosition())
    session.add(pm)
    session.add(pm2)
    session.commit()

    # q1 (the qualifier) is committed before m1/m2, which reference it.
    q1 = Measurement(measurementId=3,
                     physicalMeasurementsId=pm.physicalMeasurementsId,
                     codeSystem='codeSystem',
                     codeValue='codeValue',
                     measurementTime=datetime.datetime.now(),
                     valueCodeSystem='valueCodeSystem',
                     valueCodeValue='value3')
    session.add(q1)
    session.commit()
    m1 = Measurement(measurementId=1,
                     physicalMeasurementsId=pm.physicalMeasurementsId,
                     codeSystem='codeSystem',
                     codeValue='codeValue',
                     measurementTime=datetime.datetime.now(),
                     bodySiteCodeSystem='bodySiteCodeSystem',
                     bodySiteCodeValue='bodySiteCodeValue',
                     valueString='a',
                     valueDecimal=1.2,
                     valueUnit='cm',
                     valueCodeSystem='valueCodeSystem',
                     valueCodeValue='value',
                     valueDateTime=datetime.datetime.now(),
                     qualifierId=q1.measurementId)
    session.add(m1)
    session.commit()
    m2 = Measurement(measurementId=2,
                     physicalMeasurementsId=pm.physicalMeasurementsId,
                     codeSystem='codeSystem',
                     codeValue='codeValue',
                     measurementTime=datetime.datetime.now(),
                     valueCodeSystem='valueCodeSystem',
                     valueCodeValue='value2',
                     parentId=m1.measurementId,
                     qualifierId=q1.measurementId)
    session.add(m2)
    session.commit()

    # Questionnaire + versioned history with one question and one concept.
    q = Questionnaire(questionnaireId=1,
                      version=1,
                      created=datetime.datetime.now(),
                      lastModified=datetime.datetime.now(),
                      resource='what?')
    qh = QuestionnaireHistory(questionnaireId=1,
                              version=1,
                              created=datetime.datetime.now(),
                              lastModified=datetime.datetime.now(),
                              resource='what?')
    qh.questions.append(
        QuestionnaireQuestion(questionnaireQuestionId=1,
                              questionnaireId=1,
                              questionnaireVersion=1,
                              linkId="1.2.3",
                              codeId=2,
                              repeats=True))
    qh.concepts.append(
        QuestionnaireConcept(questionnaireConceptId=1,
                             questionnaireId=1,
                             questionnaireVersion=1,
                             codeId=1))
    session.add(q)
    session.add(qh)
    session.commit()

    # A response to the questionnaire with one answer row.
    qr = QuestionnaireResponse(questionnaireResponseId=1,
                               questionnaireId=1,
                               questionnaireVersion=1,
                               participantId=1,
                               created=datetime.datetime.now(),
                               resource='blah')
    qr.answers.append(
        QuestionnaireResponseAnswer(questionnaireResponseAnswerId=1,
                                    questionnaireResponseId=1,
                                    questionId=1,
                                    endTime=datetime.datetime.now(),
                                    valueSystem='a',
                                    valueCodeId=3,
                                    valueDecimal=123,
                                    valueString=self.fake.first_name(),
                                    valueDate=datetime.date.today()))
    session.add(qr)
    session.commit()

    # Metrics tables: bucket references the version inserted just before it.
    mv = MetricsVersion(metricsVersionId=1,
                        inProgress=False,
                        complete=True,
                        date=datetime.datetime.utcnow(),
                        dataVersion=1)
    session.add(mv)
    session.commit()
    mb = MetricsBucket(metricsVersionId=1,
                       date=datetime.date.today(),
                       hpoId='PITT',
                       metrics='blah')
    session.add(mb)
    session.commit()