Example #1
class OrganizationImporter(CsvImporter):
    def __init__(self):
        super(OrganizationImporter, self).__init__(
            'organization', OrganizationDao(), 'organizationId', 'externalId',
            [
                ORGANIZATION_AWARDEE_ID_COLUMN,
                ORGANIZATION_ORGANIZATION_ID_COLUMN, ORGANIZATION_NAME_COLUMN
            ])
        self.hpo_dao = HPODao()

    def _entity_from_row(self, row):
        hpo = self.hpo_dao.get_by_name(
            row[ORGANIZATION_AWARDEE_ID_COLUMN].upper())
        if hpo is None:
            logging.warn('Invalid awardee ID %s importing organization %s',
                         row[ORGANIZATION_AWARDEE_ID_COLUMN],
                         row[ORGANIZATION_ORGANIZATION_ID_COLUMN])
            self.errors.append(
                'Invalid awardee ID {} importing organization {}'.format(
                    row[ORGANIZATION_AWARDEE_ID_COLUMN],
                    row[ORGANIZATION_ORGANIZATION_ID_COLUMN]))
            return None
        return Organization(
            externalId=row[ORGANIZATION_ORGANIZATION_ID_COLUMN].upper(),
            displayName=row[ORGANIZATION_NAME_COLUMN],
            hpoId=hpo.hpoId)
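
A minimal usage sketch for the importer above, assuming a configured database that already contains an HPO named PITT; the row values below are invented for illustration. _entity_from_row maps one CSV row to an Organization, upper-casing the awardee and organization IDs, and returns None (recording an error) when the awardee is unknown.

example_importer = OrganizationImporter()
example_row = {
    ORGANIZATION_AWARDEE_ID_COLUMN: 'pitt',                     # matched, upper-cased, against HPO.name
    ORGANIZATION_ORGANIZATION_ID_COLUMN: 'pitt_banner_health',  # becomes Organization.externalId (upper-cased)
    ORGANIZATION_NAME_COLUMN: 'Banner Health',                  # becomes Organization.displayName
}
example_org = example_importer._entity_from_row(example_row)    # None if the awardee lookup fails
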
class DvOrderApiTestBase(FlaskTestBase):
    mayolink_response = None

    def setUp(self, use_mysql=True, with_data=True):
        super(DvOrderApiTestBase, self).setUp(use_mysql=use_mysql,
                                              with_data=with_data)
        self.dv_order_dao = DvOrderDao()
        self.hpo_dao = HPODao()
        self.participant_dao = ParticipantDao()
        self.summary_dao = ParticipantSummaryDao()
        self.code_dao = CodeDao()

        self.hpo = self.hpo_dao.get_by_name('PITT')
        self.participant = Participant(hpoId=self.hpo.hpoId,
                                       participantId=123456789,
                                       biobankId=7)
        self.participant_dao.insert(self.participant)
        self.summary = self.participant_summary(self.participant)
        self.summary_dao.insert(self.summary)

        mayolinkapi_patcher = mock.patch(
            'dao.dv_order_dao.MayoLinkApi',
            **{'return_value.post.return_value': self.mayolink_response})
        mayolinkapi_patcher.start()
        self.addCleanup(mayolinkapi_patcher.stop)

    def get_payload(self, filename):
        return load_test_data_json(filename)

    def get_orders(self):
        with self.dv_order_dao.session() as session:
            return list(session.query(BiobankDVOrder))
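
A hedged sketch of a concrete test built on the base class above; the canned mayolink_response payload and the test itself are assumptions for illustration, not part of the real suite. Because setUp already patches dao.dv_order_dao.MayoLinkApi, a subclass only needs to override mayolink_response and exercise the DAO.

class DvOrderApiExampleTest(DvOrderApiTestBase):
    # Hypothetical canned payload returned by the mocked MayoLinkApi.post().
    mayolink_response = {'orders': {'order': {'status': 'Queued'}}}

    def test_no_orders_initially(self):
        # The transient test database starts out with no DV orders.
        self.assertEqual(self.get_orders(), [])
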
class HPOImporter(CsvImporter):
    def __init__(self):
        super(HPOImporter, self).__init__(
            'awardee', HPODao(), 'hpoId', 'name',
            [HPO_AWARDEE_ID_COLUMN, HPO_NAME_COLUMN, HPO_TYPE_COLUMN])
        self.new_count = 0
        self.environment = None

    def _entity_from_row(self, row):
        type_str = row[HPO_TYPE_COLUMN]
        try:
            organization_type = OrganizationType(type_str)
            if organization_type == OrganizationType.UNSET:
                organization_type = None
        except TypeError:
            logging.warn('Invalid organization type %s for awardee %s',
                         type_str, row[HPO_AWARDEE_ID_COLUMN])
            self.errors.append(
                'Invalid organization type {} for awardee {}'.format(
                    type_str, row[HPO_AWARDEE_ID_COLUMN]))
            return None
        return HPO(name=row[HPO_AWARDEE_ID_COLUMN].upper(),
                   displayName=row[HPO_NAME_COLUMN],
                   organizationType=organization_type)

    def _insert_entity(self, entity, existing_map, session, dry_run):
        # HPO IDs are not autoincremented by the database; manually set it here.
        entity.hpoId = len(existing_map) + self.new_count
        self.new_count += 1
        super(HPOImporter, self)._insert_entity(entity, existing_map, session,
                                                dry_run)

    def delete_sql_statement(self, session, str_list):
        sql = """
          DELETE FROM hpo
          WHERE hpo_id IN ({str_list})
          AND NOT EXISTS(
          SELECT * FROM participant WHERE hpo_id = hpo.hpo_id)
          AND NOT EXISTS(
          SELECT * FROM participant_history WHERE hpo_id = hpo.hpo_id)
          AND NOT EXISTS(
          SELECT * FROM participant_summary WHERE hpo_id = hpo.hpo_id)
          AND NOT EXISTS(
          SELECT * FROM organization WHERE hpo_id = hpo.hpo_id)
          AND NOT EXISTS(
          SELECT * FROM site WHERE hpo_id = hpo.hpo_id)
          """.format(str_list=str_list)

        session.execute(sql)

    def _cleanup_old_entities(self, session, row_list, dry_run):
        self.hpo_dao = HPODao()
        log_prefix = '(dry run) ' if dry_run else ''
        existing_hpos = set(hpo.name for hpo in self.hpo_dao.get_all())
        hpo_group_list_from_sheet = [
            row[HPO_AWARDEE_ID_COLUMN].upper() for row in row_list
        ]

        hpos_to_remove = existing_hpos - set(hpo_group_list_from_sheet)
        if hpos_to_remove:
            hpo_id_list = []
            for hpo in hpos_to_remove:
                old_hpo = self.hpo_dao.get_by_name(hpo)
                if old_hpo and old_hpo.isObsolete != ObsoleteStatus.OBSOLETE:
                    hpo_id_list.append(old_hpo.hpoId)
                    self.deletion_count += 1
                elif old_hpo and old_hpo.isObsolete == ObsoleteStatus.OBSOLETE:
                    logging.info(
                        'Not attempting to delete hpo [%s] with existing obsolete status',
                        old_hpo.name)

            if hpo_id_list and not dry_run:
                logging.info(log_prefix + 'Marking old HPOs as obsolete: %s',
                             hpo_id_list)
                str_list = ','.join([str(i) for i in hpo_id_list])

                sql = """ UPDATE HPO
            SET is_obsolete = 1
            WHERE hpo_id in ({params})""".format(params=str_list)

                session.execute(sql)

                # Try to delete the old HPOs; any that are still referenced by
                # another table remain marked as obsolete instead.
                self.delete_sql_statement(session, str_list)
                self.hpo_dao._invalidate_cache()
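
A minimal sketch of feeding a single awardee row through the importer above; the column values are invented, and it assumes 'HPO' is a valid OrganizationType name so the lookup does not raise TypeError.

example_importer = HPOImporter()
example_row = {
    HPO_AWARDEE_ID_COLUMN: 'pitt',                    # stored upper-cased as HPO.name
    HPO_NAME_COLUMN: 'University of Pittsburgh',      # becomes HPO.displayName
    HPO_TYPE_COLUMN: 'HPO',                           # assumed valid OrganizationType value
}
example_hpo = example_importer._entity_from_row(example_row)  # None (plus a recorded error) on a bad type
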
class OrganizationImporter(CsvImporter):
    def __init__(self):
        super(OrganizationImporter, self).__init__(
            'organization', OrganizationDao(), 'organizationId', 'externalId',
            [
                ORGANIZATION_AWARDEE_ID_COLUMN,
                ORGANIZATION_ORGANIZATION_ID_COLUMN, ORGANIZATION_NAME_COLUMN
            ])
        self.hpo_dao = HPODao()
        self.environment = None

    def _entity_from_row(self, row):
        hpo = self.hpo_dao.get_by_name(
            row[ORGANIZATION_AWARDEE_ID_COLUMN].upper())
        if hpo is None:
            logging.warn('Invalid awardee ID %s importing organization %s',
                         row[ORGANIZATION_AWARDEE_ID_COLUMN],
                         row[ORGANIZATION_ORGANIZATION_ID_COLUMN])
            self.errors.append(
                'Invalid awardee ID {} importing organization {}'.format(
                    row[ORGANIZATION_AWARDEE_ID_COLUMN],
                    row[ORGANIZATION_ORGANIZATION_ID_COLUMN]))
            return None
        return Organization(
            externalId=row[ORGANIZATION_ORGANIZATION_ID_COLUMN].upper(),
            displayName=row[ORGANIZATION_NAME_COLUMN],
            hpoId=hpo.hpoId)

    def delete_sql_statement(self, session, str_list):
        sql = """
          DELETE FROM organization
          WHERE organization_id IN ({str_list})
          AND NOT EXISTS(
          SELECT * FROM participant WHERE organization_id = organization.organization_id)
          AND NOT EXISTS(
          SELECT * FROM participant_summary WHERE organization_id = organization.organization_id)
          AND NOT EXISTS(
          SELECT * FROM participant_history WHERE organization_id = organization.organization_id)
          AND NOT EXISTS(
          SELECT * FROM site WHERE organization_id = organization.organization_id)
          """.format(str_list=str_list)

        session.execute(sql)

    def _cleanup_old_entities(self, session, row_list, dry_run):
        log_prefix = '(dry run) ' if dry_run else ''
        self.org_dao = OrganizationDao()
        existing_orgs = set(
            str(org.externalId) for org in self.org_dao.get_all())
        org_group_list_from_sheet = [
            row[ORGANIZATION_ORGANIZATION_ID_COLUMN].upper()
            for row in row_list
        ]

        orgs_to_remove = existing_orgs - set(org_group_list_from_sheet)
        if orgs_to_remove:
            org_id_list = []
            for org in orgs_to_remove:
                old_org = self.org_dao.get_by_external_id(org)
                if old_org and old_org.isObsolete != ObsoleteStatus.OBSOLETE:
                    org_id_list.append(old_org.organizationId)
                    self.deletion_count += 1
                elif old_org and old_org.isObsolete == ObsoleteStatus.OBSOLETE:
                    logging.info(
                        'Not attempting to delete org [%s] with existing obsolete status',
                        old_org.displayName)

            if org_id_list and not dry_run:
                logging.info(
                    log_prefix + 'Marking old Organizations as obsolete: %s',
                    org_id_list)
                str_list = ','.join([str(i) for i in org_id_list])
                sql = """ UPDATE organization
            SET is_obsolete = 1
            WHERE organization_id in ({org_id_list})""".format(
                    org_id_list=str_list)
                session.execute(sql)

                logging.info(
                    log_prefix +
                    'Deleting old Organizations no longer in the Google sheet: %s',
                    org_id_list)
                self.delete_sql_statement(session, str_list)
                self.org_dao._invalidate_cache()
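
The cleanup above is two-phase: organizations present in the database but missing from the sheet are marked obsolete, then deleted only when no participant, summary, history, or site rows reference them. A dry-run sketch, with the sheet row invented and the assumption that CsvImporter normally initialises errors and deletion_count:

example_importer = OrganizationImporter()
example_importer.errors, example_importer.deletion_count = [], 0  # normally set by CsvImporter (assumption)
sheet_rows = [{ORGANIZATION_ORGANIZATION_ID_COLUMN: 'PITT_BANNER_HEALTH'}]  # invented sheet contents
with OrganizationDao().session() as session:
    # dry_run=True only counts would-be removals; no UPDATE or DELETE is executed.
    example_importer._cleanup_old_entities(session, sheet_rows, dry_run=True)
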
class ParticipantCountsOverTimeApiTest(FlaskTestBase):

  def setUp(self):
    super(ParticipantCountsOverTimeApiTest, self).setUp(use_mysql=True)
    self.dao = ParticipantDao()
    self.ps_dao = ParticipantSummaryDao()
    self.ps = ParticipantSummary()
    self.calendar_dao = CalendarDao()
    self.hpo_dao = HPODao()

    # Needed by ParticipantCountsOverTimeApi
    self.hpo_dao.insert(HPO(hpoId=TEST_HPO_ID, name=TEST_HPO_NAME, displayName='Test',
                       organizationType=OrganizationType.UNSET))

    self.time1 = datetime.datetime(2017, 12, 31)
    self.time2 = datetime.datetime(2018, 1, 1)
    self.time3 = datetime.datetime(2018, 1, 2)
    self.time4 = datetime.datetime(2018, 1, 3)

    # Insert 2 weeks of dates
    curr_date = datetime.date(2017, 12, 22)
    for _ in xrange(0, 14):
      calendar_day = Calendar(day=curr_date)
      CalendarDao().insert(calendar_day)
      curr_date = curr_date + datetime.timedelta(days=1)

  def _insert(self, participant, first_name=None, last_name=None, hpo_name=None,
              unconsented=False, time_int=None, time_mem=None, time_fp=None):
    """
    Create a participant in a transient test database.

    :param participant: Participant object
    :param first_name: First name
    :param last_name: Last name
    :param hpo_name: HPO name (e.g. PITT, AZ_TUCSON, or UNSET)
    :param unconsented: If True, insert the participant without a consented summary
    :param time_int: Time that participant fulfilled INTERESTED criteria
    :param time_mem: Time that participant fulfilled MEMBER criteria
    :param time_fp: Time that participant fulfilled FULL_PARTICIPANT criteria
    :return: ParticipantSummary object, or None if the participant is unconsented
    """

    if unconsented is True:
      enrollment_status = None
    elif time_mem is None:
      enrollment_status = EnrollmentStatus.INTERESTED
    elif time_fp is None:
      enrollment_status = EnrollmentStatus.MEMBER
    else:
      enrollment_status = EnrollmentStatus.FULL_PARTICIPANT

    with FakeClock(time_int):
      self.dao.insert(participant)

    participant.providerLink = make_primary_provider_link_for_name(hpo_name)
    with FakeClock(time_mem):
      self.dao.update(participant)

    if enrollment_status is None:
      return None

    summary = self.participant_summary(participant)

    if first_name:
      summary.firstName = first_name
    if last_name:
      summary.lastName = last_name

    summary.dateOfBirth = datetime.date(1978, 10, 10)

    summary.enrollmentStatus = enrollment_status

    summary.hpoId = self.hpo_dao.get_by_name(hpo_name).hpoId

    if time_mem is not None:
      with FakeClock(time_mem):
        summary.consentForElectronicHealthRecordsTime = time_mem

    if time_fp is not None:
      with FakeClock(time_fp):
        summary.consentForElectronicHealthRecordsTime = time_fp
        summary.questionnaireOnTheBasicsTime = time_fp
        summary.questionnaireOnLifestyleTime = time_fp
        summary.questionnaireOnOverallHealthTime = time_fp
        summary.physicalMeasurementsFinalizedTime = time_fp
        summary.sampleOrderStatus1ED04Time = time_fp
        summary.sampleOrderStatus1SALTime = time_fp

    self.ps_dao.insert(summary)

    return summary

  def test_get_counts_with_default_parameters(self):
    # The most basic test in this class

    p1 = Participant(participantId=1, biobankId=4)
    self._insert(p1, 'Alice', 'Aardvark', 'PITT', time_int=self.time1)

    # TODO: remove bucketSize from these parameters in all tests
    qs = """
      bucketSize=1
      &stratification=ENROLLMENT_STATUS
      &startDate=2017-12-30
      &endDate=2018-01-04
      """

    qs = ''.join(qs.split())  # Remove all whitespace

    response = self.send_get('ParticipantCountsOverTime', query_string=qs)

    # You can debug API responses easily by uncommenting the lines below
    # print('response')
    # print(response)

    interested_count_day_1 = response[0]['metrics']['INTERESTED']
    interested_count_day_2 = response[1]['metrics']['INTERESTED']

    self.assertEquals(interested_count_day_1, 0)
    self.assertEquals(interested_count_day_2, 1)

  def test_get_counts_with_single_awardee_filter_and_blank_enrollment_status(self):
    # Does the awardee filter work, and is a blank enrollmentStatus parameter handled?

    p1 = Participant(participantId=1, biobankId=4)
    self._insert(p1, 'Alice', 'Aardvark', 'PITT', time_int=self.time1)

    p1 = Participant(participantId=2, biobankId=5)
    self._insert(p1, 'Bob', 'Builder', 'AZ_TUCSON', time_int=self.time1)

    p1 = Participant(participantId=3, biobankId=6)
    self._insert(p1, 'Chad', 'Caterpillar', 'AZ_TUCSON', time_int=self.time1)

    # enrollmentStatus param left blank to test we can handle it
    qs = """
      bucketSize=1
      &stratification=ENROLLMENT_STATUS
      &startDate=2017-12-30
      &endDate=2018-01-04
      &awardee=PITT
      &enrollmentStatus=
      """

    qs = ''.join(qs.split())  # Remove all whitespace

    response = self.send_get('ParticipantCountsOverTime', query_string=qs)

    interested_count_day_1 = response[0]['metrics']['INTERESTED']
    interested_count_day_2 = response[1]['metrics']['INTERESTED']

    self.assertEquals(interested_count_day_1, 0)
    self.assertEquals(interested_count_day_2, 1)

    qs = """
      bucketSize=1
      &stratification=ENROLLMENT_STATUS
      &startDate=2017-12-30
      &endDate=2018-01-04
      &awardee=AZ_TUCSON
      """

    qs = ''.join(qs.split())  # Remove all whitespace

    response = self.send_get('ParticipantCountsOverTime', query_string=qs)

    interested_count_day_1 = response[0]['metrics']['INTERESTED']
    interested_count_day_2 = response[1]['metrics']['INTERESTED']

    self.assertEquals(interested_count_day_1, 0)
    self.assertEquals(interested_count_day_2, 2)

  def test_get_counts_with_single_awardee_filter(self):
    # Does the awardee filter work when passed a single awardee?

    p1 = Participant(participantId=1, biobankId=4)
    self._insert(p1, 'Alice', 'Aardvark', 'PITT', time_int=self.time1)

    p1 = Participant(participantId=2, biobankId=5)
    self._insert(p1, 'Bob', 'Builder', 'AZ_TUCSON', time_int=self.time1)

    p1 = Participant(participantId=3, biobankId=6)
    self._insert(p1, 'Chad', 'Caterpillar', 'AZ_TUCSON', time_int=self.time1)

    qs = """
        bucketSize=1
        &stratification=ENROLLMENT_STATUS
        &startDate=2017-12-30
        &endDate=2018-01-04
        &awardee=PITT
        """

    qs = ''.join(qs.split())  # Remove all whitespace

    response = self.send_get('ParticipantCountsOverTime', query_string=qs)

    interested_count_day_1 = response[0]['metrics']['INTERESTED']
    interested_count_day_2 = response[1]['metrics']['INTERESTED']

    self.assertEquals(interested_count_day_1, 0)
    self.assertEquals(interested_count_day_2, 1)

    qs = """
        bucketSize=1
        &stratification=ENROLLMENT_STATUS
        &startDate=2017-12-30
        &endDate=2018-01-04
        &awardee=AZ_TUCSON
        """

    qs = ''.join(qs.split())  # Remove all whitespace

    response = self.send_get('ParticipantCountsOverTime', query_string=qs)

    interested_count_day_1 = response[0]['metrics']['INTERESTED']
    interested_count_day_2 = response[1]['metrics']['INTERESTED']

    self.assertEquals(interested_count_day_1, 0)
    self.assertEquals(interested_count_day_2, 2)

  def test_get_counts_with_multiple_awardee_filters(self):
    # Does the awardee filter work when passed more than one awardee?

    p1 = Participant(participantId=1, biobankId=4)
    self._insert(p1, 'Alice', 'Aardvark', 'PITT', time_int=self.time1)

    p2 = Participant(participantId=2, biobankId=5)
    self._insert(p2, 'Bob', 'Builder', 'AZ_TUCSON', time_int=self.time1)

    p3 = Participant(participantId=3, biobankId=6)
    self._insert(p3, 'Chad', 'Caterpillar', 'AZ_TUCSON', time_int=self.time1)

    qs = """
        bucketSize=1
        &stratification=ENROLLMENT_STATUS
        &startDate=2017-12-30
        &endDate=2018-01-04
        &awardee=PITT,AZ_TUCSON
        &enrollmentStatus=
        """

    qs = ''.join(qs.split())  # Remove all whitespace

    response = self.send_get('ParticipantCountsOverTime', query_string=qs)

    interested_count_day_1 = response[0]['metrics']['INTERESTED']
    interested_count_day_2 = response[1]['metrics']['INTERESTED']

    self.assertEquals(interested_count_day_1, 0)
    self.assertEquals(interested_count_day_2, 3)

  def test_get_counts_with_enrollment_status_member_filter(self):

    p1 = Participant(participantId=1, biobankId=4)
    self._insert(p1, 'Alice', 'Aardvark', 'PITT', time_int=self.time1)

    p2 = Participant(participantId=2, biobankId=5)
    self._insert(p2, 'Bob', 'Builder', 'AZ_TUCSON', time_int=self.time1, time_mem=self.time2)

    p3 = Participant(participantId=3, biobankId=6)
    self._insert(p3, 'Chad', 'Caterpillar', 'AZ_TUCSON', time_int=self.time1, time_mem=self.time2)

    p4 = Participant(participantId=4, biobankId=7)
    self._insert(p4, 'Debra', 'Dinosaur', 'PITT', time_int=self.time1, time_mem=self.time3)

    # awardee param intentionally left blank to test we can handle it
    qs = """
      bucketSize=1
      &stratification=ENROLLMENT_STATUS
      &startDate=2017-12-30
      &endDate=2018-01-04
      &awardee=
      &enrollmentStatus=MEMBER
      """

    qs = ''.join(qs.split())  # Remove all whitespace

    response = self.send_get('ParticipantCountsOverTime', query_string=qs)

    member_count_day_1 = response[0]['metrics']['MEMBER']
    member_count_day_2 = response[1]['metrics']['MEMBER']
    member_count_day_3 = response[2]['metrics']['MEMBER']
    member_count_day_4 = response[3]['metrics']['MEMBER']
    interested_count_day_4 = response[3]['metrics']['INTERESTED']

    self.assertEquals(member_count_day_1, 0)
    self.assertEquals(member_count_day_2, 0)
    self.assertEquals(member_count_day_3, 2)
    self.assertEquals(member_count_day_4, 3)
    self.assertEquals(interested_count_day_4, 0)

    qs = """
      bucketSize=1
      &stratification=TOTAL
      &startDate=2017-12-30
      &endDate=2018-01-04
      &enrollmentStatus=MEMBER
      """

    qs = ''.join(qs.split())  # Remove all whitespace

    response = self.send_get('ParticipantCountsOverTime', query_string=qs)

    total_count_day_1 = response[0]['metrics']['TOTAL']
    total_count_day_2 = response[1]['metrics']['TOTAL']

    self.assertEquals(total_count_day_1, 0)
    self.assertEquals(total_count_day_2, 3)

  def test_get_counts_with_enrollment_status_full_participant_filter(self):

    # MEMBER @ time 1
    p1 = Participant(participantId=1, biobankId=4)
    self._insert(p1, 'Alice', 'Aardvark', 'PITT', time_int=self.time1,
                 time_mem=self.time1)

    # FULL PARTICIPANT @ time 2
    p2 = Participant(participantId=2, biobankId=5)
    self._insert(p2, 'Bob', 'Builder', 'AZ_TUCSON', time_int=self.time1,
                 time_mem=self.time1, time_fp=self.time2)

    # FULL PARTICIPANT @ time 2
    p3 = Participant(participantId=3, biobankId=6)
    self._insert(p3, 'Chad', 'Caterpillar', 'AZ_TUCSON', time_int=self.time1,
                 time_mem=self.time1, time_fp=self.time2)

    # FULL PARTICIPANT @ time 3
    p4 = Participant(participantId=4, biobankId=7)
    self._insert(p4, 'Debra', 'Dinosaur', 'PITT', time_int=self.time1,
                 time_mem=self.time1, time_fp=self.time3)

    qs = """
      bucketSize=1
      &stratification=ENROLLMENT_STATUS
      &startDate=2017-12-30
      &endDate=2018-01-04
      &enrollmentStatus=FULL_PARTICIPANT
      """

    qs = ''.join(qs.split())  # Remove all whitespace

    response = self.send_get('ParticipantCountsOverTime', query_string=qs)

    full_participant_count_day_1 = response[0]['metrics']['FULL_PARTICIPANT']
    full_participant_count_day_2 = response[1]['metrics']['FULL_PARTICIPANT']
    full_participant_count_day_3 = response[2]['metrics']['FULL_PARTICIPANT']
    full_participant_count_day_4 = response[3]['metrics']['FULL_PARTICIPANT']
    member_count_day_4 = response[3]['metrics']['MEMBER']

    self.assertEquals(full_participant_count_day_1, 0)
    self.assertEquals(full_participant_count_day_2, 0)
    self.assertEquals(full_participant_count_day_3, 2)
    self.assertEquals(full_participant_count_day_4, 3)
    self.assertEquals(member_count_day_4, 0)  # Excluded per enrollmentStatus parameter

  def test_get_counts_with_total_enrollment_status_full_participant_filter(self):
    # When filtering with TOTAL stratification, filtered participants are
    # returned by their sign up date, not the date they reached their highest
    # enrollment status.

    # MEMBER @ time 1
    p1 = Participant(participantId=1, biobankId=4)
    self._insert(p1, 'Alice', 'Aardvark', 'PITT', time_int=self.time1,
                 time_mem=self.time1)

    # FULL PARTICIPANT @ time 2
    p2 = Participant(participantId=2, biobankId=5)
    self._insert(p2, 'Bob', 'Builder', 'AZ_TUCSON', time_int=self.time1,
                 time_mem=self.time1, time_fp=self.time2)

    # FULL PARTICIPANT @ time 2
    p3 = Participant(participantId=3, biobankId=6)
    self._insert(p3, 'Chad', 'Caterpillar', 'AZ_TUCSON', time_int=self.time1,
                 time_mem=self.time1, time_fp=self.time2)

    # FULL PARTICIPANT @ time 3
    p4 = Participant(participantId=4, biobankId=7)
    self._insert(p4, 'Debra', 'Dinosaur', 'PITT', time_int=self.time1,
                 time_mem=self.time1, time_fp=self.time3)

    qs = """
      bucketSize=1
      &stratification=TOTAL
      &startDate=2017-12-30
      &endDate=2018-01-04
      &enrollmentStatus=FULL_PARTICIPANT
      """

    qs = ''.join(qs.split())  # Remove all whitespace

    response = self.send_get('ParticipantCountsOverTime', query_string=qs)

    total_count_day_1 = response[0]['metrics']['TOTAL']
    total_count_day_2 = response[1]['metrics']['TOTAL']
    total_count_day_3 = response[2]['metrics']['TOTAL']
    total_count_day_4 = response[3]['metrics']['TOTAL']

    self.assertEquals(total_count_day_1, 0)
    self.assertEquals(total_count_day_2, 3)
    self.assertEquals(total_count_day_3, 3)
    self.assertEquals(total_count_day_4, 3)

  def test_get_counts_with_single_various_filters(self):
    # Do the awardee and enrollment status filters work when passed single values?

    p1 = Participant(participantId=1, biobankId=4)
    self._insert(p1, 'Alice', 'Aardvark', 'PITT', time_int=self.time1)

    p2 = Participant(participantId=2, biobankId=5)
    self._insert(p2, 'Bob', 'Builder', 'AZ_TUCSON', time_int=self.time1)

    p3 = Participant(participantId=3, biobankId=6)
    self._insert(p3, 'Chad', 'Caterpillar', 'AZ_TUCSON', time_int=self.time1,
                 time_mem=self.time1)

    p4 = Participant(participantId=4, biobankId=7)
    self._insert(p4, 'Debra', 'Dinosaur', 'PITT', time_int=self.time1,
                 time_mem=self.time1)

    qs = """
      bucketSize=1
      &stratification=ENROLLMENT_STATUS
      &startDate=2017-12-30
      &endDate=2018-01-04
      &awardee=PITT
      &enrollmentStatus=MEMBER
      """

    qs = ''.join(qs.split())  # Remove all whitespace

    response = self.send_get('ParticipantCountsOverTime', query_string=qs)

    interested_count_day_1 = response[0]['metrics']['INTERESTED']
    interested_count_day_2 = response[1]['metrics']['INTERESTED']
    member_count_day_2 = response[1]['metrics']['MEMBER']

    self.assertEquals(interested_count_day_1, 0)

    # We requested data for only MEMBERs, so no INTERESTEDs should be returned
    self.assertEquals(interested_count_day_2, 0)

    # We requested data for only MEMBERs in PITT, so no MEMBERs in AZ_TUCSON should be returned
    self.assertEquals(member_count_day_2, 1)

  def test_get_counts_with_multiple_various_filters(self):
    # Do the awardee and enrollment status filters work when passed multiple values?

    p1 = Participant(participantId=1, biobankId=4)
    self._insert(p1, 'Alice', 'Aardvark', 'PITT', time_int=self.time1)

    p2 = Participant(participantId=2, biobankId=5)
    self._insert(p2, 'Bob', 'Builder', 'AZ_TUCSON', time_int=self.time1)

    p3 = Participant(participantId=3, biobankId=6)
    self._insert(p3, 'Chad', 'Caterpillar', 'AZ_TUCSON', time_int=self.time1)

    qs = """
        bucketSize=1
        &stratification=ENROLLMENT_STATUS
        &startDate=2017-12-30
        &endDate=2018-01-04
        &awardee=AZ_TUCSON,PITT
        &enrollmentStatus=INTERESTED,MEMBER,FULL_PARTICIPANT
        """

    qs = ''.join(qs.split())  # Remove all whitespace

    response = self.send_get('ParticipantCountsOverTime', query_string=qs)

    interested_count_day_1 = response[0]['metrics']['INTERESTED']
    interested_count_day_2 = response[1]['metrics']['INTERESTED']

    self.assertEquals(interested_count_day_1, 0)
    self.assertEquals(interested_count_day_2, 3)

  def test_get_counts_with_total_stratification_unfiltered(self):

    p1 = Participant(participantId=1, biobankId=4)
    self._insert(p1, 'Alice', 'Aardvark', 'PITT', time_int=self.time1)

    p2 = Participant(participantId=2, biobankId=5)
    self._insert(p2, 'Bob', 'Builder', 'AZ_TUCSON', time_int=self.time1)

    p3 = Participant(participantId=3, biobankId=6)
    self._insert(p3, 'Chad', 'Caterpillar', 'AZ_TUCSON', time_int=self.time1)

    qs = """
      bucketSize=1
      &stratification=TOTAL
      &startDate=2017-12-30
      &endDate=2018-01-04
      """

    qs = ''.join(qs.split())  # Remove all whitespace

    response = self.send_get('ParticipantCountsOverTime', query_string=qs)

    total_count_day_1 = response[0]['metrics']['TOTAL']
    total_count_day_2 = response[1]['metrics']['TOTAL']

    self.assertEquals(total_count_day_1, 0)
    self.assertEquals(total_count_day_2, 3)

  def test_get_counts_excluding_interested_participants(self):
    # When filtering only for MEMBER, no INTERESTED participants (whether consented or unconsented) should be counted

    p1 = Participant(participantId=1, biobankId=4)
    self._insert(p1, 'Alice', 'Aardvark', 'UNSET', unconsented=True, time_int=self.time1)

    p2 = Participant(participantId=2, biobankId=5)
    self._insert(p2, 'Bob', 'Builder', 'AZ_TUCSON', time_int=self.time1)

    p3 = Participant(participantId=3, biobankId=6)
    self._insert(p3, 'Chad', 'Caterpillar', 'AZ_TUCSON', time_int=self.time1, time_mem=self.time1)

    qs = """
        bucketSize=1
        &stratification=ENROLLMENT_STATUS
        &startDate=2017-12-30
        &endDate=2018-01-04
        &enrollmentStatus=MEMBER
        """

    qs = ''.join(qs.split())  # Remove all whitespace

    response = self.send_get('ParticipantCountsOverTime', query_string=qs)

    interested_count_day_2 = response[1]['metrics']['INTERESTED']
    member_count_day_2 = response[1]['metrics']['MEMBER']

    self.assertEquals(interested_count_day_2, 0)
    self.assertEquals(member_count_day_2, 1)

  def test_get_counts_excluding_withdrawn_participants(self):
    # Withdrawn participants should not appear in counts

    p1 = Participant(participantId=1, biobankId=4)
    self._insert(p1, 'Alice', 'Aardvark', 'PITT', time_int=self.time1)

    p2 = Participant(participantId=2, biobankId=5)
    self._insert(p2, 'Bob', 'Builder', 'AZ_TUCSON', time_int=self.time1)

    p3 = Participant(participantId=3, biobankId=6)
    ps3 = self._insert(p3, 'Chad', 'Caterpillar', 'AZ_TUCSON', time_int=self.time1)
    ps3.withdrawalStatus = WithdrawalStatus.NO_USE  # Chad withdrew from the study
    self.ps_dao.update(ps3)

    qs = """
        bucketSize=1
        &stratification=TOTAL
        &startDate=2017-12-30
        &endDate=2018-01-04
        """

    qs = ''.join(qs.split())  # Remove all whitespace

    response = self.send_get('ParticipantCountsOverTime', query_string=qs)

    total_count_day_1 = response[0]['metrics']['TOTAL']
    total_count_day_2 = response[1]['metrics']['TOTAL']

    self.assertEquals(total_count_day_1, 0)
    self.assertEquals(total_count_day_2, 2)

  def test_get_counts_for_unconsented_individuals(self):
    # Those who have signed up but not consented should be INTERESTED

    p1 = Participant(participantId=1, biobankId=4)
    self._insert(p1, 'Alice', 'Aardvark', 'UNSET', unconsented=True, time_int=self.time1)

    p2 = Participant(participantId=2, biobankId=5)
    self._insert(p2, 'Bob', 'Builder', 'AZ_TUCSON', time_int=self.time1)

    p3 = Participant(participantId=3, biobankId=6)
    self._insert(p3, 'Chad', 'Caterpillar', 'AZ_TUCSON', time_int=self.time1)

    qs = """
          bucketSize=1
          &stratification=ENROLLMENT_STATUS
          &startDate=2017-12-30
          &endDate=2018-01-04
          """

    qs = ''.join(qs.split())  # Remove all whitespace

    response = self.send_get('ParticipantCountsOverTime', query_string=qs)

    total_count_day_1 = response[0]['metrics']['INTERESTED']
    total_count_day_2 = response[1]['metrics']['INTERESTED']

    self.assertEquals(total_count_day_1, 0)
    self.assertEquals(total_count_day_2, 3)

  def test_url_parameter_validation_for_date_range(self):
    # Ensure requests for very long date ranges are marked BAD REQUEST

    qs = """
        bucketSize=1
        &stratification=TOTAL
        &startDate=2017-12-30
        &endDate=2217-12-30
        """
    qs = ''.join(qs.split())  # Remove all whitespace

    response = self.send_get('ParticipantCountsOverTime', query_string=qs,
                             expected_status=httplib.BAD_REQUEST)
    self.assertEquals(response, None)

  def test_url_parameter_validation_for_stratifications(self):
    # Ensure requests with invalid stratifications are marked BAD REQUEST

    qs = """
          bucketSize=1
          &stratification=FOOBAR
          &startDate=2017-12-30
          &endDate=2018-01-04
          """
    qs = ''.join(qs.split())  # Remove all whitespace

    response = self.send_get('ParticipantCountsOverTime', query_string=qs,
                             expected_status=httplib.BAD_REQUEST)
    self.assertEquals(response, None)

  def test_url_parameter_validation_for_awardee(self):
    # Ensure requests with an invalid awardee are marked BAD REQUEST

    qs = """
            bucketSize=1
            &stratification=ENROLLMENT_STATUS
            &startDate=2017-12-30
            &endDate=2018-01-04
            &awardee=FOOBAR
            """
    qs = ''.join(qs.split())  # Remove all whitespace

    response = self.send_get('ParticipantCountsOverTime', query_string=qs,
                             expected_status=httplib.BAD_REQUEST)
    self.assertEquals(response, None)

  def test_url_parameter_validation_for_enrollment_status(self):
    # Ensure requests with an invalid enrollment status are marked BAD REQUEST

    qs = """
            bucketSize=1
            &stratification=ENROLLMENT_STATUS
            &startDate=2017-12-30
            &endDate=2018-01-04
            &enrollmentStatus=FOOBAR
            """
    qs = ''.join(qs.split())  # Remove all whitespace

    response = self.send_get('ParticipantCountsOverTime', query_string=qs,
                             expected_status=httplib.BAD_REQUEST)
    self.assertEquals(response, None)
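
The tests above build their query strings by writing a readable multi-line block and stripping the whitespace. An equivalent standard-library sketch (the helper name is hypothetical; urllib.urlencode percent-encodes the comma in multi-value filters, which the server decodes before parsing):

import urllib

def counts_query(start_date, end_date, stratification='ENROLLMENT_STATUS', **filters):
    params = dict(bucketSize=1, stratification=stratification,
                  startDate=start_date, endDate=end_date, **filters)
    return urllib.urlencode(params)

# e.g. counts_query('2017-12-30', '2018-01-04', awardee='PITT,AZ_TUCSON')
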
class ParticipantSummaryDao(UpdatableDao):
    def __init__(self):
        super(ParticipantSummaryDao,
              self).__init__(ParticipantSummary,
                             order_by_ending=_ORDER_BY_ENDING)
        self.hpo_dao = HPODao()
        self.code_dao = CodeDao()

    def get_id(self, obj):
        return obj.participantId

    def get_by_email(self, email):
        with self.session() as session:
            return session.query(ParticipantSummary).filter(
                ParticipantSummary.email == email).all()

    def _validate_update(self, session, obj, existing_obj):
        """Participant summaries don't have a version value; drop it from validation logic."""
        if not existing_obj:
            raise NotFound('%s with id %s does not exist' %
                           (self.model_type.__name__, obj.participantId))

    def _has_withdrawn_filter(self, query):
        for field_filter in query.field_filters:
            if (field_filter.field_name == 'withdrawalStatus'
                    and field_filter.value == WithdrawalStatus.NO_USE):
                return True
            if field_filter.field_name == 'withdrawalTime' and field_filter.value is not None:
                return True
        return False

    def _get_non_withdrawn_filter_field(self, query):
        """Returns the first field referenced in query filters which isn't in
    WITHDRAWN_PARTICIPANT_FIELDS."""
        for field_filter in query.field_filters:
            if field_filter.field_name not in WITHDRAWN_PARTICIPANT_FIELDS:
                return field_filter.field_name
        return None

    def _initialize_query(self, session, query_def):
        non_withdrawn_field = self._get_non_withdrawn_filter_field(query_def)
        if self._has_withdrawn_filter(query_def):
            if non_withdrawn_field:
                raise BadRequest(
                    "Can't query on %s for withdrawn participants" %
                    non_withdrawn_field)
            # When querying for withdrawn participants, ensure that the only fields being filtered on or
            # ordered by are in WITHDRAWN_PARTICIPANT_FIELDS.
            return super(ParticipantSummaryDao,
                         self)._initialize_query(session, query_def)
        else:
            query = super(ParticipantSummaryDao,
                          self)._initialize_query(session, query_def)
            if non_withdrawn_field:
                # When querying on fields that aren't available for withdrawn participants,
                # ensure that we only return participants
                # who have not withdrawn or withdrew in the past 48 hours.
                withdrawn_visible_start = (
                    clock.CLOCK.now() - WITHDRAWN_PARTICIPANT_VISIBILITY_TIME)
                return query.filter(
                    or_(
                        ParticipantSummary.withdrawalStatus !=
                        WithdrawalStatus.NO_USE,
                        ParticipantSummary.withdrawalTime >=
                        withdrawn_visible_start))
            else:
                # When querying on fields that are available for withdrawn participants, return everybody;
                # withdrawn participants will have all but WITHDRAWN_PARTICIPANT_FIELDS cleared out 48
                # hours after withdrawing.
                return query

    def _get_order_by_ending(self, query):
        if self._has_withdrawn_filter(query):
            return _WITHDRAWN_ORDER_BY_ENDING
        return self.order_by_ending

    def _add_order_by(self, query, order_by, field_names, fields):
        if order_by.field_name in _CODE_FILTER_FIELDS:
            return super(ParticipantSummaryDao, self)._add_order_by(
                query, OrderBy(order_by.field_name + 'Id', order_by.ascending),
                field_names, fields)
        return super(ParticipantSummaryDao,
                     self)._add_order_by(query, order_by, field_names, fields)

    def make_query_filter(self, field_name, value):
        """Handle HPO and code values when parsing filter values."""
        if field_name == 'hpoId':
            hpo = self.hpo_dao.get_by_name(value)
            if not hpo:
                raise BadRequest('No HPO found with name %s' % value)
            return super(ParticipantSummaryDao,
                         self).make_query_filter(field_name, hpo.hpoId)
        if field_name in _CODE_FILTER_FIELDS:
            if value == UNSET:
                return super(ParticipantSummaryDao,
                             self).make_query_filter(field_name + 'Id', None)
            # Note: we do not at present support querying for UNMAPPED code values.
            code = self.code_dao.get_code(PPI_SYSTEM, value)
            if not code:
                raise BadRequest('No code found: %s' % value)
            return super(ParticipantSummaryDao,
                         self).make_query_filter(field_name + 'Id',
                                                 code.codeId)
        return super(ParticipantSummaryDao,
                     self).make_query_filter(field_name, value)

    def update_from_biobank_stored_samples(self, participant_id=None):
        """Rewrites sample-related summary data. Call this after updating BiobankStoredSamples.
    If participant_id is provided, only that participant will have their summary updated."""
        baseline_tests_sql, baseline_tests_params = get_sql_and_params_for_array(
            config.getSettingList(config.BASELINE_SAMPLE_TEST_CODES),
            'baseline')
        dna_tests_sql, dna_tests_params = get_sql_and_params_for_array(
            config.getSettingList(config.DNA_SAMPLE_TEST_CODES), 'dna')
        sample_sql, sample_params = _get_sample_sql_and_params()
        sql = """
    UPDATE
      participant_summary
    SET
      num_baseline_samples_arrived = (
        SELECT
          COUNT(*)
        FROM
          biobank_stored_sample
        WHERE
          biobank_stored_sample.biobank_id = participant_summary.biobank_id
          AND biobank_stored_sample.test IN %s
      ),
      samples_to_isolate_dna = (
          CASE WHEN EXISTS(SELECT * FROM biobank_stored_sample
                           WHERE biobank_stored_sample.biobank_id = participant_summary.biobank_id
                           AND biobank_stored_sample.test IN %s)
          THEN :received ELSE :unset END
      ) %s""" % (baseline_tests_sql, dna_tests_sql, sample_sql)
        params = {
            'received': int(SampleStatus.RECEIVED),
            'unset': int(SampleStatus.UNSET)
        }
        params.update(baseline_tests_params)
        params.update(dna_tests_params)
        params.update(sample_params)
        enrollment_status_params = {
            'submitted': int(QuestionnaireStatus.SUBMITTED),
            'num_baseline_ppi_modules': self._get_num_baseline_ppi_modules(),
            'completed': int(PhysicalMeasurementsStatus.COMPLETED),
            'received': int(SampleStatus.RECEIVED),
            'full_participant': int(EnrollmentStatus.FULL_PARTICIPANT),
            'member': int(EnrollmentStatus.MEMBER),
            'interested': int(EnrollmentStatus.INTERESTED)
        }
        enrollment_status_sql = _ENROLLMENT_STATUS_SQL
        # If participant_id is provided, add the participant ID filter to both update statements.
        if participant_id:
            sql += _PARTICIPANT_ID_FILTER
            params['participant_id'] = participant_id
            enrollment_status_sql += _PARTICIPANT_ID_FILTER
            enrollment_status_params['participant_id'] = participant_id

        with self.session() as session:
            session.execute(sql, params)
            session.execute(enrollment_status_sql, enrollment_status_params)

    def _get_num_baseline_ppi_modules(self):
        return len(
            config.getSettingList(config.BASELINE_PPI_QUESTIONNAIRE_FIELDS))

    def update_enrollment_status(self, summary):
        """Updates the enrollment status field on the provided participant summary to
    the correct value based on the other fields on it. Called after
    a questionnaire response or physical measurements are submitted."""
        consent = (summary.consentForStudyEnrollment
                   == QuestionnaireStatus.SUBMITTED
                   and summary.consentForElectronicHealthRecords
                   == QuestionnaireStatus.SUBMITTED)
        enrollment_status = self.calculate_enrollment_status(
            consent, summary.numCompletedBaselinePPIModules,
            summary.physicalMeasurementsStatus, summary.samplesToIsolateDNA)
        summary.enrollmentStatus = enrollment_status

    def calculate_enrollment_status(self, consent_for_study_enrollment_and_ehr,
                                    num_completed_baseline_ppi_modules,
                                    physical_measurements_status,
                                    samples_to_isolate_dna):
        if consent_for_study_enrollment_and_ehr:
            if (num_completed_baseline_ppi_modules
                    == self._get_num_baseline_ppi_modules()
                    and physical_measurements_status
                    == PhysicalMeasurementsStatus.COMPLETED
                    and samples_to_isolate_dna == SampleStatus.RECEIVED):
                return EnrollmentStatus.FULL_PARTICIPANT
            return EnrollmentStatus.MEMBER
        return EnrollmentStatus.INTERESTED

    def to_client_json(self, model):
        result = model.asdict()
        # Participants that withdrew more than 48 hours ago should have fields other than
        # WITHDRAWN_PARTICIPANT_FIELDS cleared.
        if (model.withdrawalStatus == WithdrawalStatus.NO_USE
                and model.withdrawalTime <
                clock.CLOCK.now() - WITHDRAWN_PARTICIPANT_VISIBILITY_TIME):
            result = {k: result.get(k) for k in WITHDRAWN_PARTICIPANT_FIELDS}

        result['participantId'] = to_client_participant_id(model.participantId)
        biobank_id = result.get('biobankId')
        if biobank_id:
            result['biobankId'] = to_client_biobank_id(biobank_id)
        date_of_birth = result.get('dateOfBirth')
        if date_of_birth:
            result['ageRange'] = get_bucketed_age(date_of_birth,
                                                  clock.CLOCK.now())
        else:
            result['ageRange'] = UNSET
        format_json_hpo(result, self.hpo_dao, 'hpoId')
        _initialize_field_type_sets()
        for fieldname in _DATE_FIELDS:
            format_json_date(result, fieldname)
        for fieldname in _CODE_FIELDS:
            format_json_code(result, self.code_dao, fieldname)
        for fieldname in _ENUM_FIELDS:
            format_json_enum(result, fieldname)
        if (model.withdrawalStatus == WithdrawalStatus.NO_USE
                or model.suspensionStatus == SuspensionStatus.NO_CONTACT):
            result['recontactMethod'] = 'NO_CONTACT'
        # Strip None values.
        result = {k: v for k, v in result.iteritems() if v is not None}

        return result
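
A worked illustration of calculate_enrollment_status above, assuming config.BASELINE_PPI_QUESTIONNAIRE_FIELDS lists exactly three modules so _get_num_baseline_ppi_modules() returns 3: consent plus all baseline modules, completed physical measurements, and received DNA samples yields FULL_PARTICIPANT; consent alone yields MEMBER; no consent yields INTERESTED.

example_dao = ParticipantSummaryDao()
example_status = example_dao.calculate_enrollment_status(
    consent_for_study_enrollment_and_ehr=True,
    num_completed_baseline_ppi_modules=3,            # assumed module count
    physical_measurements_status=PhysicalMeasurementsStatus.COMPLETED,
    samples_to_isolate_dna=SampleStatus.RECEIVED)
assert example_status == EnrollmentStatus.FULL_PARTICIPANT
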
class ParticipantSummaryDao(UpdatableDao):
    def __init__(self):
        super(ParticipantSummaryDao,
              self).__init__(ParticipantSummary,
                             order_by_ending=_ORDER_BY_ENDING)
        self.hpo_dao = HPODao()
        self.code_dao = CodeDao()
        self.site_dao = SiteDao()
        self.organization_dao = OrganizationDao()

    def get_id(self, obj):
        return obj.participantId

    def _validate_update(self, session, obj, existing_obj):  # pylint: disable=unused-argument
        """Participant summaries don't have a version value; drop it from validation logic."""
        if not existing_obj:
            raise NotFound('%s with id %s does not exist' %
                           (self.model_type.__name__, obj.participantId))

    def _has_withdrawn_filter(self, query):
        for field_filter in query.field_filters:
            if (field_filter.field_name == 'withdrawalStatus'
                    and field_filter.value == WithdrawalStatus.NO_USE):
                return True
            if field_filter.field_name == 'withdrawalTime' and field_filter.value is not None:
                return True
        return False

    def _get_non_withdrawn_filter_field(self, query):
        """Returns the first field referenced in query filters which isn't in
    WITHDRAWN_PARTICIPANT_FIELDS."""
        for field_filter in query.field_filters:
            if field_filter.field_name not in WITHDRAWN_PARTICIPANT_FIELDS:
                return field_filter.field_name
        return None

    def _initialize_query(self, session, query_def):
        non_withdrawn_field = self._get_non_withdrawn_filter_field(query_def)
        if self._has_withdrawn_filter(query_def):
            if non_withdrawn_field:
                raise BadRequest(
                    "Can't query on %s for withdrawn participants" %
                    non_withdrawn_field)
            # When querying for withdrawn participants, ensure that the only fields being filtered on or
            # ordered by are in WITHDRAWN_PARTICIPANT_FIELDS.
            return super(ParticipantSummaryDao,
                         self)._initialize_query(session, query_def)
        else:
            query = super(ParticipantSummaryDao,
                          self)._initialize_query(session, query_def)
            if non_withdrawn_field:
                # When querying on fields that aren't available for withdrawn participants,
                # ensure that we only return participants
                # who have not withdrawn or withdrew in the past 48 hours.
                withdrawn_visible_start = (
                    clock.CLOCK.now() - WITHDRAWN_PARTICIPANT_VISIBILITY_TIME)
                return query.filter(
                    or_(
                        ParticipantSummary.withdrawalStatus !=
                        WithdrawalStatus.NO_USE,
                        ParticipantSummary.withdrawalTime >=
                        withdrawn_visible_start))
            else:
                # When querying on fields that are available for withdrawn participants, return everybody;
                # withdrawn participants will have all but WITHDRAWN_PARTICIPANT_FIELDS cleared out 48
                # hours after withdrawing.
                return query

    def _get_order_by_ending(self, query):
        if self._has_withdrawn_filter(query):
            return _WITHDRAWN_ORDER_BY_ENDING
        return self.order_by_ending

    def _add_order_by(self, query, order_by, field_names, fields):
        if order_by.field_name in _CODE_FILTER_FIELDS:
            return super(ParticipantSummaryDao, self)._add_order_by(
                query, OrderBy(order_by.field_name + 'Id', order_by.ascending),
                field_names, fields)
        return super(ParticipantSummaryDao,
                     self)._add_order_by(query, order_by, field_names, fields)

    def make_query_filter(self, field_name, value):
        """Handle HPO and code values when parsing filter values."""
        if field_name == 'biobankId':
            value = from_client_biobank_id(value, log_exception=True)
        if field_name == 'hpoId' or field_name == 'awardee':
            hpo = self.hpo_dao.get_by_name(value)
            if not hpo:
                raise BadRequest('No HPO found with name %s' % value)
            if field_name == 'awardee':
                field_name = 'hpoId'
            return super(ParticipantSummaryDao,
                         self).make_query_filter(field_name, hpo.hpoId)
        if field_name == 'organization':
            organization = self.organization_dao.get_by_external_id(value)
            if not organization:
                raise BadRequest('No organization found with name %s' % value)
            return super(ParticipantSummaryDao,
                         self).make_query_filter(field_name + 'Id',
                                                 organization.organizationId)
        if field_name in _SITE_FIELDS:
            if value == UNSET:
                return super(ParticipantSummaryDao,
                             self).make_query_filter(field_name + 'Id', None)
            site = self.site_dao.get_by_google_group(value)
            if not site:
                raise BadRequest('No site found with google group %s' % value)
            return super(ParticipantSummaryDao,
                         self).make_query_filter(field_name + 'Id',
                                                 site.siteId)
        if field_name in _CODE_FILTER_FIELDS:
            if value == UNSET:
                return super(ParticipantSummaryDao,
                             self).make_query_filter(field_name + 'Id', None)
            # Note: we do not at present support querying for UNMAPPED code values.
            code = self.code_dao.get_code(PPI_SYSTEM, value)
            if not code:
                raise BadRequest('No code found: %s' % value)
            return super(ParticipantSummaryDao,
                         self).make_query_filter(field_name + 'Id',
                                                 code.codeId)
        return super(ParticipantSummaryDao,
                     self).make_query_filter(field_name, value)

    def update_from_biobank_stored_samples(self, participant_id=None):
        """Rewrites sample-related summary data. Call this after updating BiobankStoredSamples.
    If participant_id is provided, only that participant will have their summary updated."""
        now = clock.CLOCK.now()
        sample_sql, sample_params = _get_sample_sql_and_params(now)

        baseline_tests_sql, baseline_tests_params = _get_baseline_sql_and_params(
        )
        dna_tests_sql, dna_tests_params = _get_dna_isolates_sql_and_params()

        sample_status_time_sql = _get_sample_status_time_sql_and_params()
        sample_status_time_params = {}

        counts_sql = """
    UPDATE
      participant_summary
    SET
      num_baseline_samples_arrived = {baseline_tests_sql},
      samples_to_isolate_dna = {dna_tests_sql},
      last_modified = :now
    WHERE
      num_baseline_samples_arrived != {baseline_tests_sql} OR
      samples_to_isolate_dna != {dna_tests_sql}
    """.format(baseline_tests_sql=baseline_tests_sql,
               dna_tests_sql=dna_tests_sql)
        counts_params = {'now': now}
        counts_params.update(baseline_tests_params)
        counts_params.update(dna_tests_params)

        enrollment_status_sql = _ENROLLMENT_STATUS_SQL
        enrollment_status_params = {
            'submitted': int(QuestionnaireStatus.SUBMITTED),
            'unset': int(QuestionnaireStatus.UNSET),
            'num_baseline_ppi_modules': self._get_num_baseline_ppi_modules(),
            'completed': int(PhysicalMeasurementsStatus.COMPLETED),
            'received': int(SampleStatus.RECEIVED),
            'full_participant': int(EnrollmentStatus.FULL_PARTICIPANT),
            'member': int(EnrollmentStatus.MEMBER),
            'interested': int(EnrollmentStatus.INTERESTED),
            'now': now
        }

        # If participant_id is provided, add the participant ID filter to all update statements.
        if participant_id:
            sample_sql += ' AND participant_id = :participant_id'
            sample_params['participant_id'] = participant_id
            counts_sql += ' AND participant_id = :participant_id'
            counts_params['participant_id'] = participant_id
            enrollment_status_sql += ' AND participant_id = :participant_id'
            enrollment_status_params['participant_id'] = participant_id
            sample_status_time_sql += ' AND a.participant_id = :participant_id'
            sample_status_time_params['participant_id'] = participant_id

        sample_sql = replace_null_safe_equals(sample_sql)
        counts_sql = replace_null_safe_equals(counts_sql)

        with self.session() as session:
            session.execute(sample_sql, sample_params)
            session.execute(counts_sql, counts_params)
            session.execute(enrollment_status_sql, enrollment_status_params)
            # TODO: Change this to the optimized sql in _update_dv_stored_samples()
            session.execute(sample_status_time_sql, sample_status_time_params)

    def _get_num_baseline_ppi_modules(self):
        return len(
            config.getSettingList(config.BASELINE_PPI_QUESTIONNAIRE_FIELDS))

    def update_enrollment_status(self, summary):
        """Updates the enrollment status field on the provided participant summary to
    the correct value based on the other fields on it. Called after
    a questionnaire response or physical measurements are submitted."""
        consent = (summary.consentForStudyEnrollment == QuestionnaireStatus.SUBMITTED and
                   summary.consentForElectronicHealthRecords == QuestionnaireStatus.SUBMITTED) or \
                  (summary.consentForStudyEnrollment == QuestionnaireStatus.SUBMITTED and
                   summary.consentForElectronicHealthRecords is None and
                   summary.consentForDvElectronicHealthRecordsSharing == QuestionnaireStatus.SUBMITTED)

        enrollment_status = self.calculate_enrollment_status(
            consent, summary.numCompletedBaselinePPIModules,
            summary.physicalMeasurementsStatus, summary.samplesToIsolateDNA)
        summary.enrollmentStatusMemberTime = self.calculate_member_time(
            consent, summary)
        summary.enrollmentStatusCoreOrderedSampleTime = self.calculate_core_ordered_sample_time(
            consent, summary)
        summary.enrollmentStatusCoreStoredSampleTime = self.calculate_core_stored_sample_time(
            consent, summary)

        # Update last modified date if status changes
        if summary.enrollmentStatus != enrollment_status:
            summary.lastModified = clock.CLOCK.now()

        summary.enrollmentStatus = enrollment_status

    def calculate_enrollment_status(self, consent,
                                    num_completed_baseline_ppi_modules,
                                    physical_measurements_status,
                                    samples_to_isolate_dna):
        if consent:
            if (num_completed_baseline_ppi_modules
                    == self._get_num_baseline_ppi_modules()
                    and physical_measurements_status
                    == PhysicalMeasurementsStatus.COMPLETED
                    and samples_to_isolate_dna == SampleStatus.RECEIVED):
                return EnrollmentStatus.FULL_PARTICIPANT
            return EnrollmentStatus.MEMBER
        return EnrollmentStatus.INTERESTED

    def calculate_member_time(self, consent, participant_summary):
        if consent and participant_summary.enrollmentStatusMemberTime is not None:
            return participant_summary.enrollmentStatusMemberTime
        elif consent:
            if participant_summary.consentForElectronicHealthRecords is None and \
              participant_summary.consentForDvElectronicHealthRecordsSharing == \
              QuestionnaireStatus.SUBMITTED:
                return participant_summary.consentForDvElectronicHealthRecordsSharingTime
            return participant_summary.consentForElectronicHealthRecordsTime
        else:
            return None

    def calculate_core_stored_sample_time(self, consent, participant_summary):
        if consent and \
          participant_summary.numCompletedBaselinePPIModules == \
          self._get_num_baseline_ppi_modules() and \
          participant_summary.physicalMeasurementsStatus == PhysicalMeasurementsStatus.COMPLETED and \
          participant_summary.samplesToIsolateDNA == SampleStatus.RECEIVED:

            max_core_sample_time = self.calculate_max_core_sample_time(
                participant_summary, field_name_prefix='sampleStatus')

            if max_core_sample_time and participant_summary.enrollmentStatusCoreStoredSampleTime:
                return participant_summary.enrollmentStatusCoreStoredSampleTime
            else:
                return max_core_sample_time
        else:
            return None

    def calculate_core_ordered_sample_time(self, consent, participant_summary):
        if consent and \
          participant_summary.numCompletedBaselinePPIModules == \
          self._get_num_baseline_ppi_modules() and \
          participant_summary.physicalMeasurementsStatus == PhysicalMeasurementsStatus.COMPLETED:

            max_core_sample_time = self.calculate_max_core_sample_time(
                participant_summary, field_name_prefix='sampleOrderStatus')

            if max_core_sample_time and participant_summary.enrollmentStatusCoreOrderedSampleTime:
                return participant_summary.enrollmentStatusCoreOrderedSampleTime
            else:
                return max_core_sample_time
        else:
            return None

    def calculate_max_core_sample_time(self,
                                       participant_summary,
                                       field_name_prefix='sampleStatus'):
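        # Derivation: take the earliest <prefix><test>Time among the configured DNA sample
        # tests, then return the latest of that time, the member time, the three baseline
        # PPI module times and the physical measurements finalized time.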

        keys = [
            field_name_prefix + '%sTime' % test
            for test in config.getSettingList(config.DNA_SAMPLE_TEST_CODES)
        ]
        sample_time_list = \
          [v for k, v in participant_summary if k in keys and v is not None]

        sample_time = min(sample_time_list) if sample_time_list else None

        if sample_time is not None:
            return max([
                sample_time, participant_summary.enrollmentStatusMemberTime,
                participant_summary.questionnaireOnTheBasicsTime,
                participant_summary.questionnaireOnLifestyleTime,
                participant_summary.questionnaireOnOverallHealthTime,
                participant_summary.physicalMeasurementsFinalizedTime
            ])
        else:
            return None

    def calculate_distinct_visits(self,
                                  pid,
                                  finalized_time,
                                  id_,
                                  amendment=False):
        """ Participants may get PM or biobank samples on same day. This should be considered as
    a single visit in terms of program payment to participant.
    return Boolean: true if there has not been an order on same date."""
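        # Example: a biobank order and a physical measurement both finalized on the same
        # calendar date count as a single visit, so the later submission is not distinct.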
        from dao.biobank_order_dao import BiobankOrderDao
        from dao.physical_measurements_dao import PhysicalMeasurementsDao

        day_has_order, day_has_measurement = False, False
        existing_orders = BiobankOrderDao().get_biobank_orders_for_participant(
            pid)
        ordered_samples = BiobankOrderDao(
        ).get_ordered_samples_for_participant(pid)
        existing_measurements = PhysicalMeasurementsDao(
        ).get_measuremnets_for_participant(pid)

        order_id_to_finalized_date = {
            sample.biobankOrderId: sample.finalized.date()
            for sample in ordered_samples if sample.finalized
        }

        if existing_orders and finalized_time:
            for order in existing_orders:
                order_finalized_date = order_id_to_finalized_date.get(
                    order.biobankOrderId)
                if order_finalized_date == finalized_time.date() and order.biobankOrderId != id_ and \
                  order.orderStatus != BiobankOrderStatus.CANCELLED:
                    day_has_order = True
                elif order.biobankOrderId == id_ and order.orderStatus == BiobankOrderStatus.AMENDED:
                    day_has_order = True
        elif not finalized_time and amendment:
            day_has_order = True

        if existing_measurements and finalized_time:
            for measurement in existing_measurements:
                if not measurement.finalized:
                    continue
                if measurement.finalized.date() == finalized_time.date() and \
                  measurement.physicalMeasurementsId != id_:
                    day_has_measurement = True

        is_distinct_visit = not (day_has_order or day_has_measurement)
        return is_distinct_visit

    def to_client_json(self, model):
        result = model.asdict()
        # Participants that withdrew more than 48 hours ago should have fields other than
        # WITHDRAWN_PARTICIPANT_FIELDS cleared.
        if (model.withdrawalStatus == WithdrawalStatus.NO_USE and
            (model.withdrawalTime is None or model.withdrawalTime <
             clock.CLOCK.now() - WITHDRAWN_PARTICIPANT_VISIBILITY_TIME)):
            result = {k: result.get(k) for k in WITHDRAWN_PARTICIPANT_FIELDS}

        elif model.withdrawalStatus != WithdrawalStatus.NO_USE and \
          model.suspensionStatus == SuspensionStatus.NO_CONTACT:
            for i in SUSPENDED_PARTICIPANT_FIELDS:
                result[i] = UNSET

        result['participantId'] = to_client_participant_id(model.participantId)
        biobank_id = result.get('biobankId')
        if biobank_id:
            result['biobankId'] = to_client_biobank_id(biobank_id)
        date_of_birth = result.get('dateOfBirth')
        if date_of_birth:
            result['ageRange'] = get_bucketed_age(date_of_birth,
                                                  clock.CLOCK.now())
        else:
            result['ageRange'] = UNSET

        if result.get('primaryLanguage') is None:
            result['primaryLanguage'] = UNSET

        if 'organizationId' in result:
            result['organization'] = result['organizationId']
            del result['organizationId']
            format_json_org(result, self.organization_dao, 'organization')

        format_json_hpo(result, self.hpo_dao, 'hpoId')
        result['awardee'] = result['hpoId']
        _initialize_field_type_sets()
        for fieldname in _DATE_FIELDS:
            format_json_date(result, fieldname)
        for fieldname in _CODE_FIELDS:
            format_json_code(result, self.code_dao, fieldname)
        for fieldname in _ENUM_FIELDS:
            format_json_enum(result, fieldname)
        for fieldname in _SITE_FIELDS:
            format_json_site(result, self.site_dao, fieldname)
        if (model.withdrawalStatus == WithdrawalStatus.NO_USE
                or model.suspensionStatus == SuspensionStatus.NO_CONTACT):
            result['recontactMethod'] = 'NO_CONTACT'
        # Strip None values.
        result = {k: v for k, v in result.iteritems() if v is not None}

        return result

    def _decode_token(self, query_def, fields):
        """ If token exists in participant_summary api, decode and use lastModified to add a buffer
    of 60 seconds. This ensures when a _sync link is used no one is missed. This will return
    at a minimum, the last participant and any more that have been modified in the previous 60
    seconds. Duplicate participants returned should be handled on the client side."""
        decoded_vals = super(ParticipantSummaryDao,
                             self)._decode_token(query_def, fields)
        if query_def.order_by and (query_def.order_by.field_name
                                   == 'lastModified'
                                   and query_def.always_return_token is True
                                   and query_def.backfill_sync is True):
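            # Rewind the decoded lastModified cursor by LAST_MODIFIED_BUFFER_SECONDS
            # (60 seconds per the docstring) so a backfilling _sync call overlaps the
            # previous window instead of leaving a gap.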
            decoded_vals[0] = decoded_vals[0] - datetime.timedelta(
                seconds=config.LAST_MODIFIED_BUFFER_SECONDS)

        return decoded_vals

    @staticmethod
    def update_ehr_status(summary, update_time):
        summary.ehrStatus = EhrStatus.PRESENT
        if not summary.ehrReceiptTime:
            summary.ehrReceiptTime = update_time
        summary.ehrUpdateTime = update_time
        return summary
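
# A minimal standalone sketch (not part of the DAO above) that mirrors the three-tier
# decision in calculate_enrollment_status. The string return values and the
# num_baseline_modules=3 default are illustrative assumptions only; the real code reads
# the module count from config and returns EnrollmentStatus enum members.
def sketch_enrollment_status(consent, num_completed_baseline_ppi_modules,
                             pm_completed, dna_samples_received,
                             num_baseline_modules=3):
    if not consent:
        return 'INTERESTED'
    if (num_completed_baseline_ppi_modules == num_baseline_modules
            and pm_completed and dna_samples_received):
        return 'FULL_PARTICIPANT'
    return 'MEMBER'

assert sketch_enrollment_status(False, 0, False, False) == 'INTERESTED'
assert sketch_enrollment_status(True, 2, True, True) == 'MEMBER'
assert sketch_enrollment_status(True, 3, True, True) == 'FULL_PARTICIPANT'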
Exemple #8
0
class DvOrderApiTestBase(FlaskTestBase):
    mayolink_response = None

    def setUp(self, use_mysql=True, with_data=True):
        super(DvOrderApiTestBase, self).setUp(use_mysql=use_mysql,
                                              with_data=with_data)

        self.test_data = {
            "subject": "Patient/P123456789",
            "awardee": "PITT",
            "organization": "PITT_BANNER_HEALTH",
            "patient_status": "YES",
            "user": "******",
            "site": "hpo-site-monroeville",
            "authored": "2019-04-26T12:11:41",
            "comment": "This is comment"
        }

        self.participant_dao = ParticipantDao()
        self.summary_dao = ParticipantSummaryDao()
        self.hpo_dao = HPODao()

        self.hpo = self.hpo_dao.get_by_name('PITT')

        self.participant = Participant(hpoId=self.hpo.hpoId,
                                       participantId=123456789,
                                       biobankId=7)
        self.participant_dao.insert(self.participant)
        self.summary = self.participant_summary(self.participant)
        self.summary_dao.insert(self.summary)

    def test_patient_status_created(self):
        data = copy.copy(self.test_data)

        # insert patient status
        url = os.path.join('PatientStatus',
                           'P{0}'.format(self.participant.participantId),
                           'Organization', 'PITT_BANNER_HEALTH')
        resp = self.send_post(url, data, expected_status=httplib.CREATED)

        # test that our test_data dict is in the resp.response dict.
        resp_data = json.loads(resp.response[0])
        self.assertDictContainsSubset(data, resp_data)

        # attempt to insert again, should fail with duplicate.
        self.send_post(url, data, expected_status=httplib.CONFLICT)

        # Get record and test that our test_data dict is in the resp.response dict.
        resp = self.send_get(url)
        self.assertDictContainsSubset(data, resp)

    def test_patient_status_updated(self):
        data = copy.copy(self.test_data)

        # insert patient status
        url = os.path.join('PatientStatus',
                           'P{0}'.format(self.participant.participantId),
                           'Organization', 'PITT_BANNER_HEALTH')
        resp = self.send_post(url, data, expected_status=httplib.CREATED)

        data['authored'] = '2019-04-27T16:32:01'
        data['comment'] = 'saw patient at new site'
        data['site'] = 'hpo-site-bannerphoenix'

        resp = self.send_put(url, data, expected_status=httplib.OK)
        self.assertDictContainsSubset(data, resp)

        # Get record and test that our test_data dict is in the resp.response dict.
        resp = self.send_get(url)
        self.assertDictContainsSubset(data, resp)
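
# A minimal sketch of the request shape exercised by the tests above: the PatientStatus
# path is built from the participant id and the organization, and an update re-sends the
# same payload with only the changed fields replaced. The helper name and the trimmed
# payload below are illustrative assumptions; the fixture values come from this test class.
import copy
import os

def sketch_patient_status_request(participant_id, organization, payload):
    url = os.path.join('PatientStatus', 'P{0}'.format(participant_id),
                       'Organization', organization)
    return url, copy.copy(payload)

url, data = sketch_patient_status_request(
    123456789, 'PITT_BANNER_HEALTH',
    {'patient_status': 'YES', 'authored': '2019-04-26T12:11:41'})
print(url)  # PatientStatus/P123456789/Organization/PITT_BANNER_HEALTH (POSIX separators)
data['authored'] = '2019-04-27T16:32:01'  # a PUT to the same url updates the record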