def setUp(self, use_mysql=True, with_data=True):
    """Seed the shared request payload and a PITT participant for DV order tests."""
    super(DvOrderApiTestBase, self).setUp(use_mysql=use_mysql, with_data=with_data)
    # Canonical DV order request body used across the test cases.
    self.test_data = {
        "subject": "Patient/P123456789",
        "awardee": "PITT",
        "organization": "PITT_BANNER_HEALTH",
        "patient_status": "YES",
        "user": "******",
        "site": "hpo-site-monroeville",
        "authored": "2019-04-26T12:11:41",
        "comment": "This is comment",
    }
    self.participant_dao = ParticipantDao()
    self.summary_dao = ParticipantSummaryDao()
    self.hpo_dao = HPODao()
    # Pair a fixed participant with the PITT awardee and insert its summary.
    self.hpo = self.hpo_dao.get_by_name('PITT')
    self.participant = Participant(
        hpoId=self.hpo.hpoId, participantId=123456789, biobankId=7)
    self.participant_dao.insert(self.participant)
    self.summary = self.participant_summary(self.participant)
    self.summary_dao.insert(self.summary)
def setUp(self, **kwargs):
    """Create the DAOs and TEST/FOO/BAR HPO-organization fixtures for EHR metrics tests."""
    super(MetricsEhrApiTestBase, self).setUp(use_mysql=True, **kwargs)
    self.dao = ParticipantDao()
    self.ps_dao = ParticipantSummaryDao()
    self.ehr_receipt_dao = EhrReceiptDao()
    self.ps = ParticipantSummary()
    self.calendar_dao = CalendarDao()
    self.site_dao = SiteDao()
    self.hpo_dao = HPODao()
    self.org_dao = OrganizationDao()
    # One TEST awardee plus two regular awardees, each with one organization.
    self.hpo_test = self._make_hpo(hpoId=TEST_HPO_ID, name=TEST_HPO_NAME,
                                   displayName='Test',
                                   organizationType=OrganizationType.UNSET)
    self.hpo_foo = self._make_hpo(hpoId=10, name='FOO', displayName='Foo')
    self.hpo_bar = self._make_hpo(hpoId=11, name='BAR', displayName='Bar')
    self.org_foo_a = self._make_org(organizationId=10, externalId='FOO_A',
                                    displayName='Foo A', hpoId=self.hpo_foo.hpoId)
    self.org_bar_a = self._make_org(organizationId=11, externalId='BAR_A',
                                    displayName='Bar A', hpoId=self.hpo_bar.hpoId)
class MetricsEhrServiceTest(SqlTestBase):
    """Tests for MetricsEhrService's HPO-id to organization-id resolution."""

    def setUp(self, with_data=True, use_mysql=True):
        super(MetricsEhrServiceTest, self).setUp(with_data=with_data, use_mysql=use_mysql)
        self.service = MetricsEhrService()
        self.hpo_dao = HPODao()
        self.org_dao = OrganizationDao()
        # Two awardees: FOO owns one organization, BAR owns two.
        self.hpo_foo = self._make_hpo(hpoId=10, name='FOO', displayName='Foo')
        self.hpo_bar = self._make_hpo(hpoId=11, name='BAR', displayName='Bar')
        self.org_foo_a = self._make_org(organizationId=10, externalId='FOO_A',
                                        displayName='Foo A', hpoId=self.hpo_foo.hpoId)
        self.org_bar_a = self._make_org(organizationId=11, externalId='BAR_A',
                                        displayName='Bar A', hpoId=self.hpo_bar.hpoId)
        self.org_bar_b = self._make_org(organizationId=12, externalId='BAR_B',
                                        displayName='Bar B', hpoId=self.hpo_bar.hpoId)

    def _make_hpo(self, **kwargs):
        # Build, insert, and return an HPO row from keyword column values.
        hpo = HPO(**kwargs)
        self.hpo_dao.insert(hpo)
        return hpo

    def _make_org(self, **kwargs):
        # Build, insert, and return an Organization row from keyword column values.
        org = Organization(**kwargs)
        self.org_dao.insert(org)
        return org

    def test_get_organization_ids_from_hpo_ids(self):
        foo_only = self.service._get_organization_ids_from_hpo_ids([self.hpo_foo.hpoId])
        self.assertEqual(foo_only, [self.org_foo_a.organizationId])

        bar_only = self.service._get_organization_ids_from_hpo_ids([self.hpo_bar.hpoId])
        self.assertEqual(bar_only, [self.org_bar_a.organizationId,
                                    self.org_bar_b.organizationId])

        both = self.service._get_organization_ids_from_hpo_ids(
            [self.hpo_foo.hpoId, self.hpo_bar.hpoId])
        self.assertEqual(both, [self.org_foo_a.organizationId,
                                self.org_bar_a.organizationId,
                                self.org_bar_b.organizationId])
def setUp(self, with_data=True, use_mysql=True):
    """Create the service under test plus FOO (one org) and BAR (two orgs) fixtures."""
    super(MetricsEhrServiceTest, self).setUp(with_data=with_data, use_mysql=use_mysql)
    self.service = MetricsEhrService()
    self.hpo_dao = HPODao()
    self.org_dao = OrganizationDao()
    self.hpo_foo = self._make_hpo(hpoId=10, name='FOO', displayName='Foo')
    self.hpo_bar = self._make_hpo(hpoId=11, name='BAR', displayName='Bar')
    self.org_foo_a = self._make_org(organizationId=10, externalId='FOO_A',
                                    displayName='Foo A', hpoId=self.hpo_foo.hpoId)
    self.org_bar_a = self._make_org(organizationId=11, externalId='BAR_A',
                                    displayName='Bar A', hpoId=self.hpo_bar.hpoId)
    self.org_bar_b = self._make_org(organizationId=12, externalId='BAR_B',
                                    displayName='Bar B', hpoId=self.hpo_bar.hpoId)
def refresh_data_for_metrics_cache(self, dao):
    """Rebuild the metrics cache for every HPO, then prune stale records."""
    refreshed_at = datetime.datetime.now()
    for hpo in HPODao().get_all():
        # All HPOs share the same refresh timestamp for this run.
        self.insert_cache_by_hpo(dao, hpo.hpoId, refreshed_at)
    dao.delete_old_records()
def __init__(self):
    """Set up the summary DAO plus the helper DAOs used for related lookups."""
    super(ParticipantSummaryDao, self).__init__(
        ParticipantSummary, order_by_ending=_ORDER_BY_ENDING)
    # Helper DAOs for resolving HPO, code, site, and organization references.
    self.hpo_dao = HPODao()
    self.code_dao = CodeDao()
    self.site_dao = SiteDao()
    self.organization_dao = OrganizationDao()
def __init__(self):
    """Configure the CSV importer for organization rows keyed by external id."""
    required_columns = [
        ORGANIZATION_AWARDEE_ID_COLUMN,
        ORGANIZATION_ORGANIZATION_ID_COLUMN,
        ORGANIZATION_NAME_COLUMN,
    ]
    super(OrganizationImporter, self).__init__(
        'organization', OrganizationDao(), 'organizationId', 'externalId',
        required_columns)
    # Used to resolve each row's awardee name to an HPO id.
    self.hpo_dao = HPODao()
def setUp(self, with_data=True, use_mysql=True):
    """Create the DAOs and baseline rows used by the EHR receipt DAO tests."""
    super(EhrReceiptDaoTest, self).setUp(with_data=with_data, use_mysql=use_mysql)
    self.setup_fake()
    self.calendar_dao = CalendarDao()
    self.org_dao = OrganizationDao()
    self.hpo_dao = HPODao()
    self.participant_dao = ParticipantDao()
    self.summary_dao = ParticipantSummaryDao()
    self.ehr_receipt_dao = EhrReceiptDao()
    self._setup_initial_data()
def main(args):
    """Import or update HPOs from a CSV file.

    Each row supplies the HPO name ('Organization ID'), display name ('Name'),
    and organization type ('Type').  Known names are updated in place when the
    display name or type changed; unknown names are inserted with a fresh hpoId.
    """
    with open(args.file, 'r') as csv_file:
        reader = csv.DictReader(csv_file)
        hpo_dao = HPODao()
        existing_hpo_map = {hpo.name: hpo for hpo in hpo_dao.get_all()}
        # BUG FIX: ids were previously assigned from len(existing_hpo_map), which
        # collides with existing hpoIds whenever they are not a dense 0..n-1
        # range; start after the current maximum instead.
        existing_ids = [hpo.hpoId for hpo in existing_hpo_map.values()]
        hpo_id = (max(existing_ids) + 1) if existing_ids else 0
        with hpo_dao.session() as session:
            for row in reader:
                name = row['Organization ID']
                display_name = row['Name']
                organization_type = OrganizationType(row['Type'])
                existing_hpo = existing_hpo_map.get(name)
                if existing_hpo:
                    # Treat a missing type as UNSET so comparison is stable.
                    existing_type = existing_hpo.organizationType or OrganizationType.UNSET
                    if (existing_hpo.displayName != display_name
                            or existing_type != organization_type):
                        existing_hpo_dict = existing_hpo.asdict()
                        existing_hpo.displayName = display_name
                        existing_hpo.organizationType = organization_type
                        hpo_dao.update_with_session(session, existing_hpo)
                        logging.info('Updating HPO: old = %s, new = %s',
                                     existing_hpo_dict, existing_hpo.asdict())
                else:
                    hpo = HPO(hpoId=hpo_id, name=name, displayName=display_name,
                              organizationType=organization_type)
                    hpo_dao.insert_with_session(session, hpo)
                    logging.info('Inserting HPO: %s', hpo.asdict())
                    hpo_id += 1
    logging.info('Done.')
def _compute():
    """Run every configured SQL aggregation and collect its (value, count) buckets."""
    out = {}
    # Using a session here should put all following SQL invocations into a
    # non-locking read transaction per
    # https://dev.mysql.com/doc/refman/5.7/en/innodb-consistent-read.html
    now = clock.CLOCK.now()
    test_hpo = HPODao().get_by_name(TEST_HPO_NAME)
    with database_factory.make_server_cursor_database().session() as session:
        for (key, sql, valuef, params) in _SQL_AGGREGATIONS:
            sql = replace_years_old(sql)
            out[key] = []
            # Common bind parameters, optionally extended per-aggregation.
            bind_params = {
                'now': now,
                'test_hpo_id': test_hpo.hpoId,
                'test_email_pattern': TEST_EMAIL_PATTERN,
                'not_withdrawn_status': WithdrawalStatus.NOT_WITHDRAWN.number,
            }
            if params:
                bind_params.update(params)
            result = session.execute(text(sql), params=bind_params)
            for row in result:
                # First selected column is the bucket value, second its count
                # (row.items() yields (column, value) pairs; Python 2 RowProxy).
                value = row.items()[0][1]
                if valuef:
                    value = valuef(value)
                out[key].append({
                    'value': value,
                    'count': row.items()[1][1],
                })
    return out
class OrganizationImporter(CsvImporter):
    """CSV importer for organization rows, keyed by external organization id."""

    def __init__(self):
        super(OrganizationImporter, self).__init__(
            'organization', OrganizationDao(), 'organizationId', 'externalId',
            [ORGANIZATION_AWARDEE_ID_COLUMN,
             ORGANIZATION_ORGANIZATION_ID_COLUMN,
             ORGANIZATION_NAME_COLUMN])
        # Used to resolve each row's awardee name to an HPO id.
        self.hpo_dao = HPODao()

    def _entity_from_row(self, row):
        # Resolve the row's awardee to an HPO; an unknown awardee is recorded
        # as an import error and the row is skipped (returns None).
        hpo = self.hpo_dao.get_by_name(row[ORGANIZATION_AWARDEE_ID_COLUMN].upper())
        if hpo is None:
            logging.warn('Invalid awardee ID %s importing organization %s',
                         row[ORGANIZATION_AWARDEE_ID_COLUMN],
                         row[ORGANIZATION_ORGANIZATION_ID_COLUMN])
            self.errors.append(
                'Invalid awardee ID {} importing organization {}'.format(
                    row[ORGANIZATION_AWARDEE_ID_COLUMN],
                    row[ORGANIZATION_ORGANIZATION_ID_COLUMN]))
            return None
        return Organization(
            externalId=row[ORGANIZATION_ORGANIZATION_ID_COLUMN].upper(),
            displayName=row[ORGANIZATION_NAME_COLUMN],
            hpoId=hpo.hpoId)
def get(self):
    """Collect metrics query parameters, splitting comma-delimited multi-value args."""
    self.hpo_dao = HPODao()
    # TODO: After enrollment status is filterable,
    # wire in 'organization', 'site', 'withdrawalStatus', and 'bucketSize'.
    params = {
        'enrollment_statuses': request.args.get('enrollmentStatus'),
        'awardees': request.args.get('awardee'),
        'stratification': request.args.get('stratification'),
        'start_date': request.args.get('startDate'),
        'end_date': request.args.get('endDate'),
    }
    # Most parameters accepted by this API can have multiple, comma-delimited
    # values; arrange those into lists.  The scalar params pass through as-is.
    for name in params:
        value = params[name]
        if name in ('start_date', 'end_date', 'stratification'):
            continue
        params[name] = [] if value is None else value.split(',')
    params = self.validate_params(params)
class DvOrderApiTestBase(FlaskTestBase):
    """Base class for DV order API tests.

    Seeds a PITT-paired participant and replaces the external MayoLink API
    with a mock returning `mayolink_response`.
    """
    # Subclasses override this with the canned MayoLink POST response.
    mayolink_response = None

    def setUp(self, use_mysql=True, with_data=True):
        super(DvOrderApiTestBase, self).setUp(use_mysql=use_mysql, with_data=with_data)
        self.dv_order_dao = DvOrderDao()
        self.hpo_dao = HPODao()
        self.participant_dao = ParticipantDao()
        self.summary_dao = ParticipantSummaryDao()
        self.code_dao = CodeDao()
        # Fixed participant paired with the PITT awardee, plus its summary row.
        self.hpo = self.hpo_dao.get_by_name('PITT')
        self.participant = Participant(
            hpoId=self.hpo.hpoId, participantId=123456789, biobankId=7)
        self.participant_dao.insert(self.participant)
        self.summary = self.participant_summary(self.participant)
        self.summary_dao.insert(self.summary)
        # Stub out the external MayoLink API for the duration of each test.
        mayolinkapi_patcher = mock.patch(
            'dao.dv_order_dao.MayoLinkApi',
            **{'return_value.post.return_value': self.mayolink_response})
        mayolinkapi_patcher.start()
        self.addCleanup(mayolinkapi_patcher.stop)

    def get_payload(self, filename):
        # Load a JSON request fixture by file name.
        return load_test_data_json(filename)

    def get_orders(self):
        # Return all BiobankDVOrder rows currently stored.
        with self.dv_order_dao.session() as session:
            return list(session.query(BiobankDVOrder))
def test_overwrite_existing_pairing(self):
    """add_missing_hpo_from_site re-pairs the participant and its summary."""
    participant_id = 99
    created = self.dao.insert(Participant(
        participantId=participant_id, biobankId=2, hpoId=self._test_db.hpo_id,
        providerLink=make_primary_provider_link_for_id(self._test_db.hpo_id)))
    self.participant_summary_dao.insert(self.participant_summary(created))
    self.assertEquals(created.hpoId, self._test_db.hpo_id)  # sanity check

    # A second HPO with a site to re-pair against.
    other_hpo = HPODao().insert(
        HPO(hpoId=PITT_HPO_ID + 1, name='DIFFERENT_HPO'))
    other_site = SiteDao().insert(
        Site(hpoId=other_hpo.hpoId, siteName='Arbitrary Site',
             googleGroup='*****@*****.**'))

    with self.dao.session() as session:
        self.dao.add_missing_hpo_from_site(session, participant_id, other_site.siteId)

    # Original Participant + summary is affected.
    refetched = self.dao.get(participant_id)
    self.assertEquals(refetched.hpoId, other_hpo.hpoId)
    self.assertEquals(refetched.providerLink,
                      make_primary_provider_link_for_id(other_hpo.hpoId))
    self.assertEquals(self.participant_summary_dao.get(participant_id).hpoId,
                      other_hpo.hpoId)
def main(args):
    """Import/update sites from a CSV, logging skipped/updated/unchanged counts."""
    skip_count = 0
    new_or_updated_count = 0
    matched_count = 0
    with open(args.file, 'r') as csv_file:
        sites_reader = csv.DictReader(csv_file)
        hpo_dao = HPODao()
        site_dao = SiteDao()
        # Existing sites keyed by Google group for upsert matching.
        existing_site_map = {site.googleGroup: site for site in site_dao.get_all()}
        with site_dao.session() as session:
            for row in sites_reader:
                site = _site_from_row(row, hpo_dao)
                if site is None:
                    skip_count += 1
                    continue
                changed = _upsert_site(
                    site, existing_site_map.get(site.googleGroup),
                    site_dao, session, args.dry_run)
                if changed:
                    new_or_updated_count += 1
                else:
                    matched_count += 1
    logging.info(
        'Done%s. %d skipped, %d sites new/updated, %d sites not changed.',
        ' (dry run)' if args.dry_run else '',
        skip_count, new_or_updated_count, matched_count)
def _get_params(num_shards, shard_number):
    """Build pipeline params for one shard, excluding TEST-HPO/test-email data."""
    test_hpo = HPODao().get_by_name(TEST_HPO_NAME)
    return {
        'num_shards': num_shards,
        'shard_number': shard_number,
        'test_hpo_id': test_hpo.hpoId,
        'test_email_pattern': TEST_EMAIL_PATTERN,
    }
def refresh_data_for_metrics_cache(self, dao):
    """Record an in-progress job status, rebuild the cache per HPO, then complete."""
    status_dao = MetricsCacheJobStatusDao()
    refreshed_at = datetime.datetime.now()
    # Mark this refresh as in progress so concurrent readers can tell.
    status_obj = status_dao.insert(MetricsCacheJobStatus(
        cacheTableName=dao.table_name,
        type=str(dao.cache_type),
        inProgress=True,
        complete=False,
        dateInserted=refreshed_at))
    for hpo in HPODao().get_all():
        self.insert_cache_by_hpo(dao, hpo.hpoId, refreshed_at)
    status_dao.set_to_complete(status_obj)
    dao.delete_old_records()
def get_filtered_results(self, start_date, end_date, filters, stratification='ENROLLMENT_STATUS'):
    """Queries DB, returns results in format consumed by front-end

    :param start_date: Start date object
    :param end_date: End date object
    :param filters: Objects representing filters specified in UI
    :param stratification: How to stratify (layer) results, as in a stacked bar chart
    :return: Filtered, stratified results by date
    :raises BadRequest: if stratification is neither TOTAL nor ENROLLMENT_STATUS
    """
    self.test_hpo_id = HPODao().get_by_name(TEST_HPO_NAME).hpoId
    self.test_email_pattern = TEST_EMAIL_PATTERN
    # Filters for participant_summary (ps) and participant (p) table
    # filters_sql_ps is used in the general case when we're querying participant_summary
    # filters_sql_p is used when also LEFT OUTER JOINing p and ps
    filters_sql_ps = self.get_facets_sql(filters)
    filters_sql_p = self.get_facets_sql(filters, table_prefix='p')
    if str(stratification) == 'TOTAL':
        strata = ['TOTAL']
        sql = self.get_total_sql(filters_sql_ps)
    elif str(stratification) == 'ENROLLMENT_STATUS':
        strata = [str(val) for val in EnrollmentStatus]
        sql = self.get_enrollment_status_sql(filters_sql_ps, filters_sql_p)
    else:
        raise BadRequest('Invalid stratification: %s' % stratification)
    params = {'start_date': start_date, 'end_date': end_date}
    results_by_date = []
    with self.session() as session:
        cursor = session.execute(sql, params)
        # Iterate through each result (by date), transforming tabular SQL results
        # into expected list-of-dictionaries response format
        try:
            results = cursor.fetchall()
            for result in results:
                # Last column is the date; the preceding columns line up with strata.
                date = result[-1]
                metrics = {}
                values = result[:-1]
                for i, value in enumerate(values):
                    key = strata[i]
                    # BUG FIX: compare to None with 'is', not '==' (NULL counts
                    # are reported as 0).
                    if value is None:
                        value = 0
                    metrics[key] = int(value)
                results_by_date.append({'date': str(date), 'metrics': metrics})
        finally:
            cursor.close()
    return results_by_date
def _get_hpo_id(obj):
    """Resolve the HPO id named in a participant payload; UNSET when absent.

    Raises BadRequest when a name is present but matches no known HPO.
    """
    hpo_name = _get_hpo_name_from_participant(obj)
    if not hpo_name:
        return UNSET_HPO_ID
    hpo = HPODao().get_by_name(hpo_name)
    if not hpo:
        raise BadRequest('No HPO found with name %s' % hpo_name)
    return hpo.hpoId
def _setup_hpos(self, org_dao=None):
    """Insert the baseline HPOs, organizations, and sites used by tests.

    Stores PITT ids on self (hpo_id, organization_id, site_id) for convenience.

    :param org_dao: optional OrganizationDao to insert through; a fresh one is
      created when not supplied.  (BUG FIX: this parameter was previously
      accepted but silently overwritten.)
    """
    hpo_dao = HPODao()
    hpo_dao.insert(HPO(hpoId=UNSET_HPO_ID, name='UNSET', displayName='Unset',
                       organizationType=OrganizationType.UNSET))
    hpo_dao.insert(HPO(hpoId=PITT_HPO_ID, name='PITT', displayName='Pittsburgh',
                       organizationType=OrganizationType.HPO))
    hpo_dao.insert(HPO(hpoId=AZ_HPO_ID, name='AZ_TUCSON', displayName='Arizona',
                       organizationType=OrganizationType.HPO))
    self.hpo_id = PITT_HPO_ID
    if org_dao is None:
        org_dao = OrganizationDao()
    org_dao.insert(Organization(organizationId=AZ_ORG_ID,
                                externalId='AZ_TUCSON_BANNER_HEALTH',
                                displayName='Banner Health',
                                hpoId=AZ_HPO_ID))
    created_org = org_dao.insert(Organization(
        organizationId=PITT_ORG_ID,
        externalId='PITT_BANNER_HEALTH',
        displayName='PITT display Banner Health',
        hpoId=PITT_HPO_ID))
    self.organization_id = created_org.organizationId
    site_dao = SiteDao()
    created_site = site_dao.insert(Site(siteName='Monroeville Urgent Care Center',
                                        googleGroup='hpo-site-monroeville',
                                        mayolinkClientNumber=7035769,
                                        organizationId=PITT_ORG_ID,
                                        hpoId=PITT_HPO_ID))
    self.site_id = created_site.siteId
    site_dao.insert(Site(siteName='Phoenix Urgent Care Center',
                         googleGroup='hpo-site-bannerphoenix',
                         mayolinkClientNumber=7035770,
                         organizationId=PITT_ORG_ID,
                         hpoId=PITT_HPO_ID))
    site_dao.insert(Site(siteName='Phoenix clinic',
                         googleGroup='hpo-site-clinic-phoenix',
                         mayolinkClientNumber=7035770,
                         organizationId=AZ_ORG_ID,
                         hpoId=AZ_HPO_ID))
def main(args):
    """Run the awardee, organization, and site importers in dependency order.

    Each importer's DAO cache is invalidated after its run so the next importer
    sees fresh rows.  BigQuery mirrors are refreshed unless this is a dry run.
    """
    HPOImporter().run(args.awardee_file, args.dry_run)
    HPODao()._invalidate_cache()
    OrganizationImporter().run(args.organization_file, args.dry_run)
    OrganizationDao()._invalidate_cache()
    SiteImporter().run(args.site_file, args.dry_run)
    # Update Organization BigQuery records
    if not args.dry_run:
        bq_hpo_update(args.project)
        bq_organization_update(args.project)
        bq_site_update(args.project)
def setUp(self, use_mysql=True, with_data=True):
    """Create DAOs, seed a PITT participant, and mock the MayoLink API."""
    super(DvOrderApiTestBase, self).setUp(use_mysql=use_mysql, with_data=with_data)
    self.dv_order_dao = DvOrderDao()
    self.hpo_dao = HPODao()
    self.participant_dao = ParticipantDao()
    self.summary_dao = ParticipantSummaryDao()
    self.code_dao = CodeDao()
    # Fixed participant paired with the PITT awardee, plus its summary row.
    self.hpo = self.hpo_dao.get_by_name('PITT')
    self.participant = Participant(
        hpoId=self.hpo.hpoId, participantId=123456789, biobankId=7)
    self.participant_dao.insert(self.participant)
    self.summary = self.participant_summary(self.participant)
    self.summary_dao.insert(self.summary)
    # Stub out the external MayoLink API for the duration of each test.
    mayolinkapi_patcher = mock.patch(
        'dao.dv_order_dao.MayoLinkApi',
        **{'return_value.post.return_value': self.mayolink_response})
    mayolinkapi_patcher.start()
    self.addCleanup(mayolinkapi_patcher.stop)
def __init__(self, client, use_local_files=None):
    """Prepare fake-participant generation: load HPOs/sites, derive age bounds.

    Raises BadRequest when no sites exist, since generation depends on them.
    """
    self._use_local_files = use_local_files
    self._client = client
    self._hpos = HPODao().get_all()
    self._sites = SiteDao().get_all()
    if not self._sites:
        raise BadRequest('No sites found; import sites before running generator.')
    self._now = clock.CLOCK.now()
    self._consent_questionnaire_id_and_version = None
    self._setup_data()
    self._setup_questionnaires()
    # Birth-date window derived from the configured participant age limits.
    self._min_birth_date = self._now - datetime.timedelta(days=_MAX_PARTICIPANT_AGE * 365)
    self._max_days_for_birth_date = 365 * (_MAX_PARTICIPANT_AGE - _MIN_PARTICIPANT_AGE)
def setUp(self):
    """Seed the TEST HPO, reference timestamps, and two weeks of calendar rows."""
    super(ParticipantCountsOverTimeApiTest, self).setUp(use_mysql=True)
    self.dao = ParticipantDao()
    self.ps_dao = ParticipantSummaryDao()
    self.ps = ParticipantSummary()
    self.calendar_dao = CalendarDao()
    self.hpo_dao = HPODao()
    # Needed by ParticipantCountsOverTimeApi
    self.hpo_dao.insert(HPO(hpoId=TEST_HPO_ID, name=TEST_HPO_NAME,
                            displayName='Test',
                            organizationType=OrganizationType.UNSET))
    self.time1 = datetime.datetime(2017, 12, 31)
    self.time2 = datetime.datetime(2018, 1, 1)
    self.time3 = datetime.datetime(2018, 1, 2)
    self.time4 = datetime.datetime(2018, 1, 3)
    # Insert 2 weeks of dates
    day = datetime.date(2017, 12, 22)
    for _ in xrange(14):
        CalendarDao().insert(Calendar(day=day))
        day = day + datetime.timedelta(days=1)
def get(self):
    """Handle GET: validate the query string and return filtered metric results."""
    self.hpo_dao = HPODao()
    raw_params = {
        'stratification': request.args.get('stratification'),
        'start_date': request.args.get('startDate'),
        'end_date': request.args.get('endDate'),
        'enrollment_statuses': request.args.get('enrollmentStatus'),
        'awardees': request.args.get('awardee'),
    }
    filters = self.validate_params(raw_params)
    return self.get_filtered_results(**filters)
def _cleanup_old_entities(self, session, row_list, dry_run):
    """Mark HPOs that vanished from the imported sheet as obsolete, then delete.

    HPOs present in the DB but absent from row_list are flagged obsolete via a
    bulk UPDATE and then deletion is attempted; rows still referenced elsewhere
    remain but are at least marked obsolete.  No writes happen on a dry run.
    """
    self.hpo_dao = HPODao()
    log_prefix = '(dry run) ' if dry_run else ''
    existing_hpos = set(hpo.name for hpo in self.hpo_dao.get_all())
    hpo_group_list_from_sheet = [row[HPO_AWARDEE_ID_COLUMN].upper() for row in row_list]
    hpos_to_remove = existing_hpos - set(hpo_group_list_from_sheet)
    if hpos_to_remove:
        hpo_id_list = []
        obsolete_names = []
        for hpo in hpos_to_remove:
            old_hpo = self.hpo_dao.get_by_name(hpo)
            if old_hpo and old_hpo.isObsolete != ObsoleteStatus.OBSOLETE:
                hpo_id_list.append(old_hpo.hpoId)
                obsolete_names.append(old_hpo.name)
                self.deletion_count += 1
            elif old_hpo and old_hpo.isObsolete == ObsoleteStatus.OBSOLETE:
                logging.info(
                    'Not attempting to delete hpo [%s] with existing obsolete status',
                    old_hpo.name)
        if hpo_id_list and not dry_run:
            # BUG FIX: previously logged only the loop's last HPO name; report
            # every HPO being marked obsolete.
            logging.info(log_prefix + 'Marking old HPOs as obsolete: %s',
                         ', '.join(obsolete_names))
            str_list = ','.join([str(i) for i in hpo_id_list])
            sql = """
          UPDATE HPO
          SET is_obsolete = 1
          WHERE hpo_id in ({params})""".format(params=str_list)
            session.execute(sql)
            # Try to delete the old HPO's but if they are referenced in another table they are
            # at least marked as obsolete
            self.delete_sql_statement(session, str_list)
            self.hpo_dao._invalidate_cache()
def setUp(self):
    """Build provider-link request payloads and insert the TEST HPO."""
    super(ParticipantApiTest, self).setUp()
    provider_link = {
        "primary": False,
        "organization": {"reference": "columbia"},
    }
    self.participant = {'providerLink': [provider_link]}
    self.participant_2 = {'externalId': 12345}
    self.provider_link_2 = {
        "primary": True,
        "organization": {"reference": "Organization/PITT"},
    }
    # Needed by test_switch_to_test_account
    self.hpo_dao = HPODao()
    self.hpo_dao.insert(HPO(hpoId=TEST_HPO_ID, name=TEST_HPO_NAME,
                            displayName='Test',
                            organizationType=OrganizationType.UNSET))
def switch_to_test_account(self, session, participant):
    """Re-pair the given participant to the TEST HPO, clearing org/site pairing.

    No-op when the participant is already paired to the TEST HPO.

    :raises BadRequest: when participant is None (lookup failed upstream).
    """
    test_hpo_id = HPODao().get_by_name(TEST_HPO_NAME).hpoId
    if participant is None:
        # BUG FIX: the message previously contained an unfilled '%r' placeholder
        # (no formatting argument was supplied) and a typo ('udpate').
        raise BadRequest('No participant found for HPO ID update.')
    if participant.hpoId == test_hpo_id:
        return
    participant.hpoId = test_hpo_id
    participant.organizationId = None
    participant.siteId = None
    # Update the version and add history row
    self._do_update(session, participant, participant)
def _create_participant(self, hpo_name):
    """Create one fake participant, optionally paired to a named or random HPO.

    Returns (participant_response, creation_time, hpo); hpo may be None when
    the participant is intentionally left unpaired.
    """
    participant_json = {}
    hpo = None
    if hpo_name:
        hpo = HPODao().get_by_name(hpo_name)
    elif random.random() > _NO_HPO_PERCENT:
        # No explicit name: pair with a random HPO most of the time.
        hpo = random.choice(self._hpos)
    if hpo and hpo.hpoId != UNSET_HPO_ID:
        participant_json['providerLink'] = json.loads(
            make_primary_provider_link_for_hpo(hpo))
    # Backdate creation by a random number of days within the history window.
    creation_time = self._days_ago(random.randint(0, _MAX_DAYS_HISTORY))
    participant_response = self._client.request_json(
        'Participant', method='POST', body=participant_json,
        pretend_date=creation_time)
    return (participant_response, creation_time, hpo)
def setUp(self, **kwargs):
    """Create two HPOs, three organizations, and four participants for EHR-status tests."""
    super(UpdateEhrStatusUpdatesTestCase, self).setUp(use_mysql=True, **kwargs)
    self.hpo_dao = HPODao()
    self.org_dao = OrganizationDao()
    self.participant_dao = ParticipantDao()
    self.summary_dao = ParticipantSummaryDao()
    self.ehr_receipt_dao = EhrReceiptDao()
    # FOO has two organizations; BAR has one.
    self.hpo_foo = self._make_hpo(int_id=10, string_id='hpo_foo')
    self.hpo_bar = self._make_hpo(int_id=11, string_id='hpo_bar')
    self.org_foo_a = self._make_org(hpo=self.hpo_foo, int_id=10, external_id='FOO_A')
    self.org_foo_b = self._make_org(hpo=self.hpo_foo, int_id=11, external_id='FOO_B')
    self.org_bar_a = self._make_org(hpo=self.hpo_bar, int_id=12, external_id='BAR_A')
    # One participant per FOO org, two in BAR's single org.
    self.participants = [
        self._make_participant(hpo=self.hpo_foo, org=self.org_foo_a, int_id=11),
        self._make_participant(hpo=self.hpo_foo, org=self.org_foo_b, int_id=12),
        self._make_participant(hpo=self.hpo_bar, org=self.org_bar_a, int_id=13),
        self._make_participant(hpo=self.hpo_bar, org=self.org_bar_a, int_id=14),
    ]