class DvOrderApiTestBase(FlaskTestBase):
    mayolink_response = None

    def setUp(self, use_mysql=True, with_data=True):
        super(DvOrderApiTestBase, self).setUp(use_mysql=use_mysql,
                                              with_data=with_data)
        self.dv_order_dao = DvOrderDao()
        self.hpo_dao = HPODao()
        self.participant_dao = ParticipantDao()
        self.summary_dao = ParticipantSummaryDao()
        self.code_dao = CodeDao()

        self.hpo = self.hpo_dao.get_by_name('PITT')
        self.participant = Participant(hpoId=self.hpo.hpoId,
                                       participantId=123456789,
                                       biobankId=7)
        self.participant_dao.insert(self.participant)
        self.summary = self.participant_summary(self.participant)
        self.summary_dao.insert(self.summary)

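        # Patch MayoLinkApi so that any order POST made through DvOrderDao returns the
        # canned class-level mayolink_response instead of calling the real service.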
        mayolinkapi_patcher = mock.patch(
            'dao.dv_order_dao.MayoLinkApi',
            **{'return_value.post.return_value': self.mayolink_response})
        mayolinkapi_patcher.start()
        self.addCleanup(mayolinkapi_patcher.stop)

    def get_payload(self, filename):
        return load_test_data_json(filename)

    def get_orders(self):
        with self.dv_order_dao.session() as session:
            return list(session.query(BiobankDVOrder))
Example #2
    def test_end_to_end(self):
        dao = BiobankStoredSampleDao()
        self.assertEquals(dao.count(), 0)

        # Create 3 participants and pass their (random) IDs into sample rows.
        summary_dao = ParticipantSummaryDao()
        biobank_ids = []
        participant_ids = []
        for _ in xrange(3):
            participant = self.participant_dao.insert(Participant())
            summary_dao.insert(self.participant_summary(participant))
            participant_ids.append(participant.participantId)
            biobank_ids.append(participant.biobankId)
            self.assertEquals(
                summary_dao.get(
                    participant.participantId).numBaselineSamplesArrived, 0)
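        # Pick three distinct baseline test codes and write a samples CSV whose name
        # carries the current Central-time timestamp, so the pipeline picks it up as
        # the latest upload.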
        test1, test2, test3 = random.sample(_BASELINE_TESTS, 3)
        samples_file = test_data.open_biobank_samples(*biobank_ids,
                                                      test1=test1,
                                                      test2=test2,
                                                      test3=test3)
        input_filename = 'cloud%s.csv' % self._naive_utc_to_naive_central(
            clock.CLOCK.now()).strftime(
                biobank_samples_pipeline.INPUT_CSV_TIME_FORMAT)
        self._write_cloud_csv(input_filename, samples_file.read())

        biobank_samples_pipeline.upsert_from_latest_csv()

        self.assertEquals(dao.count(), 3)
        self._check_summary(participant_ids[0], test1, '2016-11-29T12:19:32')
        self._check_summary(participant_ids[1], test2, '2016-11-29T12:38:58')
        self._check_summary(participant_ids[2], test3, '2016-11-29T12:41:26')
class ParticipantSummaryDaoTest(NdbTestBase):

  def setUp(self):
    super(ParticipantSummaryDaoTest, self).setUp(use_mysql=True)
    self.dao = ParticipantSummaryDao()
    self.order_dao = BiobankOrderDao()
    self.measurement_dao = PhysicalMeasurementsDao()
    self.participant_dao = ParticipantDao()
    self.no_filter_query = Query([], None, 2, None)
    self.one_filter_query = Query([FieldFilter("participantId", Operator.EQUALS, 1)],
                                  None, 2, None)
    self.two_filter_query = Query([FieldFilter("participantId", Operator.EQUALS, 1),
                                   FieldFilter("hpoId", Operator.EQUALS, PITT_HPO_ID)],
                                  None, 2, None)
    self.ascending_biobank_id_query = Query([], OrderBy("biobankId", True), 2, None)
    self.descending_biobank_id_query = Query([], OrderBy("biobankId", False), 2, None)
    self.enrollment_status_order_query = Query([], OrderBy("enrollmentStatus", True), 2, None)
    self.hpo_id_order_query = Query([], OrderBy("hpoId", True), 2, None)
    self.first_name_order_query = Query([], OrderBy("firstName", True), 2, None)

  def assert_no_results(self, query):
    results = self.dao.query(query)
    self.assertEquals([], results.items)
    self.assertIsNone(results.pagination_token)

  def assert_results(self, query, items, pagination_token=None):
    results = self.dao.query(query)
    self.assertListAsDictEquals(items, results.items)
    self.assertEquals(pagination_token, results.pagination_token,
                      "Pagination tokens don't match; decoded = %s, %s" %
                      (_decode_token(pagination_token), _decode_token(results.pagination_token)))

  def test_query_with_total(self):
    num_participants = 5
    query = Query([], None, 10, None, include_total=True)
    results = self.dao.query(query)
    self.assertEqual(results.total, 0)
    for i in range(num_participants):
      participant = Participant(participantId=i, biobankId=i)
      self._insert(participant)
    results = self.dao.query(query)
    self.assertEqual(results.total, num_participants)

  def testQuery_noSummaries(self):
    self.assert_no_results(self.no_filter_query)
    self.assert_no_results(self.one_filter_query)
    self.assert_no_results(self.two_filter_query)
    self.assert_no_results(self.ascending_biobank_id_query)
    self.assert_no_results(self.descending_biobank_id_query)

  def _insert(self, participant, first_name=None, last_name=None):
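    """Inserts the participant and a matching summary, optionally overriding its names."""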
    self.participant_dao.insert(participant)
    summary = self.participant_summary(participant)
    if first_name:
      summary.firstName = first_name
    if last_name:
      summary.lastName = last_name
    self.dao.insert(summary)
    return participant

  def testQuery_oneSummary(self):
    participant = Participant(participantId=1, biobankId=2)
    self._insert(participant)
    summary = self.dao.get(1)
    self.assert_results(self.no_filter_query, [summary])
    self.assert_results(self.one_filter_query, [summary])
    self.assert_no_results(self.two_filter_query)
    self.assert_results(self.ascending_biobank_id_query, [summary])
    self.assert_results(self.descending_biobank_id_query, [summary])

  def testUnicodeNameRoundTrip(self):
    name = self.fake.first_name()
    with self.assertRaises(UnicodeEncodeError):
      str(name)  # sanity check that the name contains non-ASCII
    participant = self._insert(Participant(participantId=1, biobankId=2))
    summary = self.dao.get(participant.participantId)
    summary.firstName = name
    self.dao.update(summary)
    fetched_summary = self.dao.get(participant.participantId)
    self.assertEquals(name, fetched_summary.firstName)

  def testQuery_twoSummaries(self):
    participant_1 = Participant(participantId=1, biobankId=2)
    self._insert(participant_1, 'Alice', 'Smith')
    participant_2 = Participant(participantId=2, biobankId=1)
    self._insert(participant_2, 'Zed', 'Zebra')
    ps_1 = self.dao.get(1)
    ps_2 = self.dao.get(2)
    self.assert_results(self.no_filter_query, [ps_1, ps_2])
    self.assert_results(self.one_filter_query, [ps_1])
    self.assert_no_results(self.two_filter_query)
    self.assert_results(self.ascending_biobank_id_query, [ps_2, ps_1])
    self.assert_results(self.descending_biobank_id_query, [ps_1, ps_2])

  def testQuery_threeSummaries_paginate(self):
    participant_1 = Participant(participantId=1, biobankId=4)
    self._insert(participant_1, 'Alice', 'Aardvark')
    participant_2 = Participant(participantId=2, biobankId=1)
    self._insert(participant_2, 'Bob', 'Builder')
    participant_3 = Participant(participantId=3, biobankId=3)
    self._insert(participant_3, 'Chad', 'Caterpillar')
    ps_1 = self.dao.get(1)
    ps_2 = self.dao.get(2)
    ps_3 = self.dao.get(3)
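    # Page size for these queries is 2, so each first page returns two summaries plus a
    # pagination token that encodes the sort-key values of the last row returned
    # (lastName, firstName, dateOfBirth, participantId for the default ordering).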
    self.assert_results(self.no_filter_query, [ps_1, ps_2],
                        _make_pagination_token(['Builder', 'Bob', None, 2]))
    self.assert_results(self.one_filter_query, [ps_1])
    self.assert_no_results(self.two_filter_query)
    self.assert_results(self.ascending_biobank_id_query, [ps_2, ps_3],
                        _make_pagination_token([3, 'Caterpillar', 'Chad', None, 3]))
    self.assert_results(self.descending_biobank_id_query, [ps_1, ps_3],
                        _make_pagination_token([3, 'Caterpillar', 'Chad', None, 3]))

    self.assert_results(_with_token(self.no_filter_query,
                                    _make_pagination_token(['Builder', 'Bob', None, 2])), [ps_3])
    self.assert_results(_with_token(self.ascending_biobank_id_query,
                                    _make_pagination_token([3, 'Caterpillar', 'Chad', None, 3])),
                        [ps_1])
    self.assert_results(_with_token(self.descending_biobank_id_query,
                                    _make_pagination_token([3, 'Caterpillar', 'Chad', None, 3])),
                        [ps_2])

  def testQuery_fourFullSummaries_paginate(self):
    participant_1 = Participant(participantId=1, biobankId=4)
    self._insert(participant_1, 'Bob', 'Jones')
    participant_2 = Participant(participantId=2, biobankId=1)
    self._insert(participant_2, 'Bob', 'Jones')
    participant_3 = Participant(participantId=3, biobankId=3)
    self._insert(participant_3, 'Bob', 'Jones')
    participant_4 = Participant(participantId=4, biobankId=2)
    self._insert(participant_4, 'Bob', 'Jones')
    ps_1 = self.dao.get(1)
    ps_2 = self.dao.get(2)
    ps_3 = self.dao.get(3)
    ps_4 = self.dao.get(4)

    ps_1.lastName = 'Jones'
    ps_1.firstName = 'Bob'
    ps_1.dateOfBirth = datetime.date(1978, 10, 9)
    ps_1.hpoId = PITT_HPO_ID
    self.dao.update(ps_1)

    ps_2.lastName = 'Aardvark'
    ps_2.firstName = 'Bob'
    ps_2.dateOfBirth = datetime.date(1978, 10, 10)
    ps_2.enrollmentStatus = EnrollmentStatus.MEMBER
    self.dao.update(ps_2)

    ps_3.lastName = 'Jones'
    ps_3.firstName = 'Bob'
    ps_3.dateOfBirth = datetime.date(1978, 10, 10)
    ps_3.hpoId = PITT_HPO_ID
    ps_3.enrollmentStatus = EnrollmentStatus.MEMBER
    self.dao.update(ps_3)

    ps_4.lastName = 'Jones'
    ps_4.enrollmentStatus = EnrollmentStatus.FULL_PARTICIPANT
    self.dao.update(ps_4)

    self.assert_results(self.no_filter_query, [ps_2, ps_4],
                        _make_pagination_token(['Jones', 'Bob', None, 4]))
    self.assert_results(self.one_filter_query, [ps_1])
    self.assert_results(self.two_filter_query, [ps_1])
    self.assert_results(self.ascending_biobank_id_query, [ps_2, ps_4],
                        _make_pagination_token([2, 'Jones', 'Bob', None, 4]))
    self.assert_results(self.descending_biobank_id_query, [ps_1, ps_3],
                        _make_pagination_token([3, 'Jones', 'Bob', datetime.date(1978, 10, 10), 3]))
    self.assert_results(self.hpo_id_order_query, [ps_2, ps_4],
                        _make_pagination_token([0, 'Jones', 'Bob', None, 4]))
    self.assert_results(self.enrollment_status_order_query, [ps_1, ps_2],
                        _make_pagination_token(['MEMBER', 'Aardvark', 'Bob',
                                                datetime.date(1978, 10, 10), 2]))

    self.assert_results(_with_token(self.no_filter_query,
                                    _make_pagination_token(['Jones', 'Bob', None, 4])),
                        [ps_1, ps_3])
    self.assert_results(_with_token(self.ascending_biobank_id_query,
                                    _make_pagination_token([2, 'Jones', 'Bob', None, 4])),
                        [ps_3, ps_1])
    self.assert_results(_with_token(self.descending_biobank_id_query,
                                    _make_pagination_token([3, 'Jones', 'Bob',
                                                            datetime.date(1978, 10, 10), 3])),
                        [ps_4, ps_2])
    self.assert_results(_with_token(self.hpo_id_order_query,
                                    _make_pagination_token([0, 'Jones', 'Bob', None, 4])),
                        [ps_1, ps_3])
    self.assert_results(_with_token(self.enrollment_status_order_query,
                                    _make_pagination_token(['MEMBER', 'Aardvark', 'Bob',
                                                datetime.date(1978, 10, 10), 2])),
                        [ps_3, ps_4])

  def test_update_from_samples(self):
    # baseline_tests = ['BASELINE1', 'BASELINE2']
    baseline_tests = ["1PST8", "2PST8"]

    config.override_setting(config.BASELINE_SAMPLE_TEST_CODES, baseline_tests)
    self.dao.update_from_biobank_stored_samples()  # safe noop

    p_baseline_samples = self._insert(Participant(participantId=1, biobankId=11))
    p_mixed_samples = self._insert(Participant(participantId=2, biobankId=22))
    p_no_samples = self._insert(Participant(participantId=3, biobankId=33))
    p_unconfirmed = self._insert(Participant(participantId=4, biobankId=44))
    self.assertEquals(self.dao.get(p_baseline_samples.participantId).numBaselineSamplesArrived, 0)

    def get_p_baseline_last_modified():
      return self.dao.get(p_baseline_samples.participantId).lastModified
    p_baseline_last_modified1 = get_p_baseline_last_modified()

    sample_dao = BiobankStoredSampleDao()

    def add_sample(participant, test_code, sample_id):
      TIME = datetime.datetime(2018, 3, 2)
      sample_dao.insert(BiobankStoredSample(
          biobankStoredSampleId=sample_id, biobankId=participant.biobankId,
          biobankOrderIdentifier='KIT', test=test_code, confirmed=TIME))

    add_sample(p_baseline_samples, baseline_tests[0], '11111')
    add_sample(p_baseline_samples, baseline_tests[1], '22223')
    add_sample(p_mixed_samples, baseline_tests[0], '11112')
    add_sample(p_mixed_samples, 'NOT1', '44441')
    # add unconfirmed sample
    sample_dao.insert(BiobankStoredSample(biobankStoredSampleId=55555,
                                          biobankId=p_unconfirmed.biobankId,
                                          biobankOrderIdentifier='KIT', test=baseline_tests[1],
                                          confirmed=None))
    # sleep 1 sec to make lastModified different
    time.sleep(1)
    self.dao.update_from_biobank_stored_samples()

    p_baseline_last_modified2 = get_p_baseline_last_modified()
    self.assertNotEquals(p_baseline_last_modified2, p_baseline_last_modified1)

    self.assertEquals(self.dao.get(p_baseline_samples.participantId).numBaselineSamplesArrived, 2)
    self.assertEquals(self.dao.get(p_mixed_samples.participantId).numBaselineSamplesArrived, 1)
    self.assertEquals(self.dao.get(p_no_samples.participantId).numBaselineSamplesArrived, 0)
    self.assertEquals(self.dao.get(p_unconfirmed.participantId).numBaselineSamplesArrived, 0)

    M_baseline_samples = self._insert(Participant(participantId=9, biobankId=99))
    add_sample(M_baseline_samples, baseline_tests[0], '999')
    M_first_update = self.dao.get(M_baseline_samples.participantId)
    # sleep 1 sec to make lastModified different
    time.sleep(1)
    self.dao.update_from_biobank_stored_samples()
    add_sample(M_baseline_samples, baseline_tests[1], '9999')
    M_second_update = self.dao.get(M_baseline_samples.participantId)
    # sleep 1 sec to make lastModified different
    time.sleep(1)
    self.dao.update_from_biobank_stored_samples()

    self.assertNotEqual(M_first_update.lastModified, M_second_update.lastModified)
    self.assertEquals(get_p_baseline_last_modified(), p_baseline_last_modified2)

  def test_update_from_samples_changed_tests(self):
    baseline_tests = ["1PST8", "2PST8"]
    config.override_setting(config.BASELINE_SAMPLE_TEST_CODES, baseline_tests)
    self.dao.update_from_biobank_stored_samples()  # safe noop

    participant = self._insert(Participant(participantId=1, biobankId=11))
    self.assertEquals(self.dao.get(participant.participantId).numBaselineSamplesArrived, 0)

    sample_dao = BiobankStoredSampleDao()
    def add_sample(test_code, sample_id):
      TIME = datetime.datetime(2018, 3, 2)
      sample_dao.insert(BiobankStoredSample(
          biobankStoredSampleId=sample_id, biobankId=participant.biobankId,
          biobankOrderIdentifier='KIT', test=test_code, confirmed=TIME))

    add_sample(baseline_tests[0], '11111')
    add_sample(baseline_tests[1], '22223')
    self.dao.update_from_biobank_stored_samples()
    summary = self.dao.get(participant.participantId)
    init_last_modified = summary.lastModified
    self.assertEquals(summary.numBaselineSamplesArrived, 2)
    # sleep 1 sec to make lastModified different
    time.sleep(1)
    # Simulate removal of one of the baseline tests from config.json.
    baseline_tests.pop()
    config.override_setting(config.BASELINE_SAMPLE_TEST_CODES, baseline_tests)
    self.dao.update_from_biobank_stored_samples()

    summary = self.dao.get(participant.participantId)
    self.assertEquals(summary.numBaselineSamplesArrived, 1)
    self.assertNotEqual(init_last_modified, summary.lastModified)

  def test_only_update_dna_sample(self):
    dna_tests = ["1ED10", "1SAL2"]

    config.override_setting(config.DNA_SAMPLE_TEST_CODES, dna_tests)
    self.dao.update_from_biobank_stored_samples()  # safe noop

    p_dna_samples = self._insert(Participant(participantId=1, biobankId=11))

    self.assertEquals(self.dao.get(p_dna_samples.participantId).samplesToIsolateDNA, None)
    self.assertEquals(
      self.dao.get(p_dna_samples.participantId).enrollmentStatusCoreStoredSampleTime, None)
    self.assertEquals(
      self.dao.get(p_dna_samples.participantId).enrollmentStatusCoreOrderedSampleTime, None)

    sample_dao = BiobankStoredSampleDao()

    def add_sample(participant, test_code, sample_id, confirmed_time):
      sample_dao.insert(BiobankStoredSample(
          biobankStoredSampleId=sample_id, biobankId=participant.biobankId,
          biobankOrderIdentifier='KIT', test=test_code, confirmed=confirmed_time))

    confirmed_time_0 = datetime.datetime(2018, 3, 1)
    add_sample(p_dna_samples, dna_tests[0], '11111', confirmed_time_0)

    self.dao.update_from_biobank_stored_samples()

    self.assertEquals(self.dao.get(p_dna_samples.participantId).samplesToIsolateDNA,
                      SampleStatus.RECEIVED)
    # only update dna sample will not update enrollmentStatusCoreStoredSampleTime
    self.assertEquals(
      self.dao.get(p_dna_samples.participantId).enrollmentStatusCoreStoredSampleTime, None)
    self.assertEquals(
      self.dao.get(p_dna_samples.participantId).enrollmentStatusCoreOrderedSampleTime, None)

  def test_calculate_enrollment_status(self):
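    # As the assertions below show, FULL_PARTICIPANT requires EHR consent, all baseline
    # PPI modules, completed physical measurements and received DNA samples; dropping any
    # of the last three yields MEMBER, and missing consent yields INTERESTED.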
    self.assertEquals(EnrollmentStatus.FULL_PARTICIPANT,
                      self.dao.calculate_enrollment_status(True,
                                                           NUM_BASELINE_PPI_MODULES,
                                                           PhysicalMeasurementsStatus.COMPLETED,
                                                           SampleStatus.RECEIVED))
    self.assertEquals(EnrollmentStatus.MEMBER,
                      self.dao.calculate_enrollment_status(True,
                                                           NUM_BASELINE_PPI_MODULES - 1,
                                                           PhysicalMeasurementsStatus.COMPLETED,
                                                           SampleStatus.RECEIVED))
    self.assertEquals(EnrollmentStatus.MEMBER,
                      self.dao.calculate_enrollment_status(True,
                                                           NUM_BASELINE_PPI_MODULES,
                                                           PhysicalMeasurementsStatus.UNSET,
                                                           SampleStatus.RECEIVED))
    self.assertEquals(EnrollmentStatus.MEMBER,
                      self.dao.calculate_enrollment_status(True,
                                                           NUM_BASELINE_PPI_MODULES,
                                                           PhysicalMeasurementsStatus.COMPLETED,
                                                           SampleStatus.UNSET))
    self.assertEquals(EnrollmentStatus.INTERESTED,
                      self.dao.calculate_enrollment_status(False,
                                                           NUM_BASELINE_PPI_MODULES,
                                                           PhysicalMeasurementsStatus.COMPLETED,
                                                           SampleStatus.RECEIVED))

  def testUpdateEnrollmentStatus(self):
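    # Submitted study and EHR consents should promote the summary from INTERESTED to
    # MEMBER, stamping enrollmentStatusMemberTime with the EHR consent time.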
    ehr_consent_time = datetime.datetime(2018, 3, 1)
    summary = ParticipantSummary(
        participantId=1,
        biobankId=2,
        consentForStudyEnrollment=QuestionnaireStatus.SUBMITTED,
        consentForElectronicHealthRecords=QuestionnaireStatus.SUBMITTED,
        consentForElectronicHealthRecordsTime=ehr_consent_time,
        enrollmentStatus=EnrollmentStatus.INTERESTED)
    self.dao.update_enrollment_status(summary)
    self.assertEquals(EnrollmentStatus.MEMBER, summary.enrollmentStatus)
    self.assertEquals(ehr_consent_time, summary.enrollmentStatusMemberTime)

  def testUpdateEnrollmentStatusLastModified(self):
    """DA-631: enrollment_status update should update last_modified."""
    participant = self._insert(Participant(participantId=6, biobankId=66))
    # collect current modified and enrollment status
    summary = self.dao.get(participant.participantId)
    test_dt = datetime.datetime(2018, 11, 1)

    def reset_summary():
      # change summary so enrollment status will be changed from INTERESTED to MEMBER.
      summary.enrollmentStatus = EnrollmentStatus.INTERESTED
      summary.lastModified = test_dt
      summary.consentForStudyEnrollment = QuestionnaireStatus.SUBMITTED
      summary.consentForElectronicHealthRecords = QuestionnaireStatus.SUBMITTED
      summary.physicalMeasurementsStatus = PhysicalMeasurementsStatus.COMPLETED
      summary.samplesToIsolateDNA = SampleStatus.RECEIVED
      self.dao.update(summary)

    ## Test Step 1: Validate update_from_biobank_stored_samples() changes lastModified.
    reset_summary()

    # Update and reload summary record
    self.dao.update_from_biobank_stored_samples(participant_id=participant.participantId)
    summary = self.dao.get(participant.participantId)

    # Test that status has changed and lastModified is also different
    self.assertEquals(EnrollmentStatus.MEMBER, summary.enrollmentStatus)
    self.assertNotEqual(test_dt, summary.lastModified)

    ## Test Step 2: Validate that update_enrollment_status() changes the lastModified property.
    reset_summary()
    summary = self.dao.get(participant.participantId)

    self.assertEqual(test_dt, summary.lastModified)

    # update_enrollment_status() does not touch the db, it only modifies object properties.
    self.dao.update_enrollment_status(summary)

    self.assertEquals(EnrollmentStatus.MEMBER, summary.enrollmentStatus)
    self.assertNotEqual(test_dt, summary.lastModified)

  def testNumberDistinctVisitsCounts(self):
    self.participant = self._insert(Participant(participantId=7, biobankId=77))
    # insert biobank order
    order = self.order_dao.insert(self._make_biobank_order())
    summary = self.dao.get(self.participant.participantId)
    self.assertEquals(summary.numberDistinctVisits, 1)
    cancel_request = cancel_biobank_order()
    # cancel biobank order
    self.order_dao.update_with_patch(order.biobankOrderId, cancel_request, order.version)
    summary = self.dao.get(self.participant.participantId)
    # distinct count should be 0
    self.assertEquals(summary.numberDistinctVisits, 0)

    self.measurement_json = json.dumps(load_measurement_json(self.participant.participantId,
                                                             TIME_1.isoformat()))
    # insert physical measurement
    measurement = self.measurement_dao.insert(self._make_physical_measurements())
    summary = self.dao.get(self.participant.participantId)
    # count should be 1
    self.assertEquals(summary.numberDistinctVisits, 1)

    # cancel the measurement
    cancel_measurement = get_restore_or_cancel_info()
    with self.measurement_dao.session() as session:
      self.measurement_dao.update_with_patch(measurement.physicalMeasurementsId, session,
                                             cancel_measurement)

    summary = self.dao.get(self.participant.participantId)
    # count should be 0
    self.assertEquals(summary.numberDistinctVisits, 0)

    with clock.FakeClock(TIME_1):
      self.order_dao.insert(self._make_biobank_order(biobankOrderId='2', identifiers=[
        BiobankOrderIdentifier(system='b', value='d')], samples=[BiobankOrderedSample(
                                                        biobankOrderId='2',
                                                        test=BIOBANK_TESTS[0],
                                                        description='description',
                                                        processingRequired=True)]))
    with clock.FakeClock(TIME_2):
      self.measurement_dao.insert(self._make_physical_measurements(
        physicalMeasurementsId=2))
      summary = self.dao.get(self.participant.participantId)

      # A PM on another day should add a new distinct count.
      self.assertEquals(summary.numberDistinctVisits, 2)

    with clock.FakeClock(TIME_3):
      self.order_dao.insert(self._make_biobank_order(biobankOrderId='3', identifiers=[
        BiobankOrderIdentifier(system='s', value='s')], samples=[BiobankOrderedSample(
        biobankOrderId='3',
        finalized=TIME_3,
        test=BIOBANK_TESTS[1],
        description='another description',
        processingRequired=False)]))

      # a physical measurement on same day as biobank order does not add distinct visit.
      self.measurement_dao.insert(self._make_physical_measurements(physicalMeasurementsId=6))

      # another biobank order on the same day should also not add a distinct visit
      self.order_dao.insert(self._make_biobank_order(biobankOrderId='7', identifiers=[
          BiobankOrderIdentifier(system='x', value='x')], samples=[BiobankOrderedSample(
              biobankOrderId='7',
              finalized=TIME_3,
              test=BIOBANK_TESTS[1],
              description='another description',
              processingRequired=False)]))

      summary = self.dao.get(self.participant.participantId)
      # 1 from each of TIME_1 TIME_2 TIME_3
      self.assertEquals(summary.numberDistinctVisits, 3)


  def test_qa_scenarios_for_pmb_visits(self):
    """ PDR at https://docs.google.com/document/d/1sL54f-I91RvhjIprrdbwD8TlR9Jq91MX2ELf1EtJdxc/edit#heading=h.bqo8kt3igsrw<Paste> """
    self.participant = self._insert(Participant(participantId=6, biobankId=66))

    # test scenario 1
    with clock.FakeClock(TIME_4):
      self.measurement_json = json.dumps(load_measurement_json(self.participant.participantId,
                                                               TIME_4.isoformat()))
      self.measurement_dao.insert(self._make_physical_measurements(physicalMeasurementsId=666,
                                                                   participantId=self.participant.participantId,
                                                                   finalized=TIME_4))
      summary = self.dao.get(self.participant.participantId)
      self.assertEquals(summary.numberDistinctVisits, 1)

    with clock.FakeClock(TIME_5):
      self.measurement_json = json.dumps(load_measurement_json(self.participant.participantId,
                                                               TIME_5.isoformat()))
      self.measurement_dao.insert(self._make_physical_measurements(physicalMeasurementsId=669,
                                                                   finalized=TIME_5))
      summary = self.dao.get(self.participant.participantId)
      self.assertEquals(summary.numberDistinctVisits, 2)

    # test scenario 2
    with clock.FakeClock(TIME_6):
      self.participant = self._insert(Participant(participantId=9, biobankId=13))
      self.measurement_json = json.dumps(load_measurement_json(self.participant.participantId,
                                                               TIME_6.isoformat()))
      self.measurement_dao.insert(self._make_physical_measurements(physicalMeasurementsId=8,
                                                                   finalized=TIME_6))
      self.order_dao.insert(self._make_biobank_order(biobankOrderId='2', identifiers=[
        BiobankOrderIdentifier(system='b', value='d')], samples=[BiobankOrderedSample(
                                                        biobankOrderId='2',
                                                        finalized=TIME_7,
                                                        test=BIOBANK_TESTS[0],
                                                        description='description',
                                                        processingRequired=True)]))


      summary = self.dao.get(self.participant.participantId)
      # distinct count should be 2
      self.assertEquals(summary.numberDistinctVisits, 2)

    # test scenario 3
    with clock.FakeClock(TIME_6):
      self.participant = self._insert(Participant(participantId=66, biobankId=42))
      self.measurement_json = json.dumps(load_measurement_json(self.participant.participantId,
                                                               TIME_6.isoformat()))
      self.measurement_dao.insert(self._make_physical_measurements(physicalMeasurementsId=12,
                                                                   createdSiteId=2,
                                                                   finalized=TIME_6))

      self.order_dao.insert(self._make_biobank_order(biobankOrderId='18', finalizedSiteId=1, identifiers=[
          BiobankOrderIdentifier(system='x', value='y')], samples=[BiobankOrderedSample(
              biobankOrderId='18',
              finalized=TIME_6,
              test=BIOBANK_TESTS[0],
              description='description',
              processingRequired=True)]))


      summary = self.dao.get(self.participant.participantId)
      # distinct count should be 1
      self.assertEquals(summary.numberDistinctVisits, 1)

    # test scenario 4
    with clock.FakeClock(TIME_8):
      self.participant = self._insert(Participant(participantId=6613, biobankId=142))
      self.measurement_json = json.dumps(load_measurement_json(self.participant.participantId,
                                                               TIME_8.isoformat()))
      self.measurement_dao.insert(self._make_physical_measurements(physicalMeasurementsId=129,
                                                                   finalized=TIME_8))

      order = self.order_dao.insert(self._make_biobank_order(biobankOrderId='999', identifiers=[
          BiobankOrderIdentifier(system='s', value='s')], samples=[BiobankOrderedSample(
              biobankOrderId='999',
              finalized=TIME_8,
              test=BIOBANK_TESTS[1],
              description='description',
              processingRequired=True)]))
      summary = self.dao.get(self.participant.participantId)
      # distinct count should be 1
      self.assertEquals(summary.numberDistinctVisits, 1)

      # change finalized time, recalculating count
      with self.order_dao.session() as session:
        existing_order = copy.deepcopy(order)
        order.samples[0].finalized = TIME_9
        self.order_dao._do_update(session, order, existing_order)

      summary = self.dao.get(self.participant.participantId)
      self.assertEquals(summary.numberDistinctVisits, 1)

      # change test, should not change count.
      with self.order_dao.session() as session:
        existing_order = copy.deepcopy(order)
        order.samples[0].test = BIOBANK_TESTS[0]
        self.order_dao._do_update(session, order, existing_order)

      summary = self.dao.get(self.participant.participantId)
      self.assertEquals(summary.numberDistinctVisits, 1)

    # test scenario 5
    with clock.FakeClock(TIME_12):
      self.participant = self._insert(Participant(participantId=3000, biobankId=2019))

      self.order_dao.insert(self._make_biobank_order(biobankOrderId='700', identifiers=[
          BiobankOrderIdentifier(system='n', value='s')], samples=[BiobankOrderedSample(
              biobankOrderId='700',
              finalized=TIME_10,
              test=BIOBANK_TESTS[1],
              description='description',
              processingRequired=True)]))
      summary = self.dao.get(self.participant.participantId)
      # distinct count should be 1
      self.assertEquals(summary.numberDistinctVisits, 1)

      other_order = self.order_dao.insert(self._make_biobank_order(biobankOrderId='701', identifiers=[
          BiobankOrderIdentifier(system='n', value='t')], samples=[BiobankOrderedSample(
              biobankOrderId='701',
              finalized=TIME_11,
              test=BIOBANK_TESTS[1],
              description='description',
              processingRequired=True)]))
      summary = self.dao.get(self.participant.participantId)
      # distinct count should be 2
      self.assertEquals(summary.numberDistinctVisits, 2)

      order = self.order_dao.insert(self._make_biobank_order(biobankOrderId='702', identifiers=[
          BiobankOrderIdentifier(system='n', value='u')], samples=[BiobankOrderedSample(
              biobankOrderId='702',
              finalized=TIME_12,
              test=BIOBANK_TESTS[1],
              description='description',
              processingRequired=True)]))
      summary = self.dao.get(self.participant.participantId)
      # distinct count should be 3
      self.assertEquals(summary.numberDistinctVisits, 3)

      self.measurement_json = json.dumps(load_measurement_json(self.participant.participantId,
                                                               TIME_12.isoformat()))
      self.measurement_dao.insert(self._make_physical_measurements(physicalMeasurementsId=120,
                                                                   finalized=TIME_12))

      summary = self.dao.get(self.participant.participantId)
      # distinct count should be 3
      self.assertEquals(summary.numberDistinctVisits, 3)
      cancel_request = cancel_biobank_order()
      # cancel biobank order with PM on same day
      self.order_dao.update_with_patch(order.biobankOrderId, cancel_request, order.version)
      summary = self.dao.get(self.participant.participantId)
      # distinct count should be 3 (the PM on same day still counts)
      self.assertEquals(summary.numberDistinctVisits, 3)

      self.measurement_json = json.dumps(load_measurement_json(self.participant.participantId,
                                                               TIME_1.isoformat()))
      self.measurement_dao.insert(self._make_physical_measurements(physicalMeasurementsId=150,
                                                                   finalized=TIME_1))
      summary = self.dao.get(self.participant.participantId)
      # distinct count should be 4
      self.assertEquals(summary.numberDistinctVisits, 4)
      # cancel order with pm on different day
      self.order_dao.update_with_patch(other_order.biobankOrderId, cancel_request, order.version)
      summary = self.dao.get(self.participant.participantId)
      # distinct count should be 3
      self.assertEquals(summary.numberDistinctVisits, 3)

  def test_pm_restore_cancel_biobank_restore_cancel(self):
    self.participant = self._insert(Participant(participantId=9, biobankId=13))
    self.measurement_json = json.dumps(load_measurement_json(self.participant.participantId,
                                                             TIME_4.isoformat()))
    measurement = self.measurement_dao.insert(self._make_physical_measurements(physicalMeasurementsId=669,
                                                                 finalized=TIME_4))
    summary = self.dao.get(self.participant.participantId)
    self.assertEquals(summary.numberDistinctVisits, 1)

    with clock.FakeClock(TIME_5):
      order = self.order_dao.insert(self._make_biobank_order(biobankOrderId='2', identifiers=[
        BiobankOrderIdentifier(system='b', value='d')], samples=[BiobankOrderedSample(
                                                        biobankOrderId='2',
                                                        finalized=TIME_5,
                                                        test=BIOBANK_TESTS[0],
                                                        description='description',
                                                        processingRequired=True)]))


    with clock.FakeClock(TIME_7):
      summary = self.dao.get(self.participant.participantId)
      # distinct count should be 2
      self.assertEquals(summary.numberDistinctVisits, 2)

      # cancel the measurement
      cancel_measurement = get_restore_or_cancel_info()
      with self.measurement_dao.session() as session:
        self.measurement_dao.update_with_patch(measurement.physicalMeasurementsId, session,
                                               cancel_measurement)

      summary = self.dao.get(self.participant.participantId)
      self.assertEquals(summary.numberDistinctVisits, 1)

    with clock.FakeClock(TIME_7):
      restore_measurement = get_restore_or_cancel_info(status='restored')
      with self.measurement_dao.session() as session:
        self.measurement_dao.update_with_patch(measurement.physicalMeasurementsId, session,
                                               restore_measurement)

      summary = self.dao.get(self.participant.participantId)
      self.assertEquals(summary.numberDistinctVisits, 2)


      cancel_request = cancel_biobank_order()
      order = self.order_dao.update_with_patch(order.biobankOrderId, cancel_request, order.version)

      summary = self.dao.get(self.participant.participantId)
      self.assertEquals(summary.numberDistinctVisits, 1)

      restore_order = get_restore_or_cancel_info(status='restored')
      restore_order['amendedReason'] = 'some reason'
      self.order_dao.update_with_patch(order.biobankOrderId, restore_order, order.version)
      summary = self.dao.get(self.participant.participantId)
      self.assertEquals(summary.numberDistinctVisits, 2)

  def test_amending_biobank_order_distinct_visit_count(self):
    self.participant = self._insert(Participant(participantId=9, biobankId=13))
    with clock.FakeClock(TIME_5):
      order = self.order_dao.insert(self._make_biobank_order(biobankOrderId='2', identifiers=[
        BiobankOrderIdentifier(system='b', value='d')], samples=[BiobankOrderedSample(
                                                        biobankOrderId='2',
                                                        finalized=TIME_5,
                                                        test=BIOBANK_TESTS[0],
                                                        description='description',
                                                        processingRequired=True)]))

      summary = self.dao.get(self.participant.participantId)
      self.assertEquals(summary.numberDistinctVisits, 1)

    with clock.FakeClock(TIME_7):
      amend_order = self._get_amended_info(order)
      with self.order_dao.session() as session:
        self.order_dao._do_update(session, amend_order, order)

      # Shouldn't change on a simple amendment (unless finalized time on samples change)
      summary = self.dao.get(self.participant.participantId)
      self.assertEquals(summary.numberDistinctVisits, 1)

    with clock.FakeClock(TIME_7_5):
      cancel_request = cancel_biobank_order()
      order = self.order_dao.update_with_patch(order.biobankOrderId, cancel_request, order.version)

    # A cancelled order (even after amending) should reduce count (unless some other valid order on same day)
    summary = self.dao.get(self.participant.participantId)
    self.assertEquals(summary.numberDistinctVisits, 0)

  @staticmethod
  def _get_amended_info(order):
    amendment = dict(amendedReason='I had to change something', amendedInfo={
      "author": {
        "system": "https://www.pmi-ops.org/healthpro-username",
        "value": "*****@*****.**"
      },
      "site": {
        "system": "https://www.pmi-ops.org/site-id",
        "value": "hpo-site-monroeville"
      }
    })

    order.amendedReason = amendment['amendedReason']
    order.amendedInfo = amendment['amendedInfo']
    return order



  def _make_biobank_order(self, **kwargs):
    """Makes a new BiobankOrder (same values every time) with valid/complete defaults.

    Kwargs pass through to BiobankOrder constructor, overriding defaults.
    """
    for k, default_value in (
        ('biobankOrderId', '1'),
        ('created', clock.CLOCK.now()),
        ('participantId', self.participant.participantId),
        ('sourceSiteId', 1),
        ('sourceUsername', '*****@*****.**'),
        ('collectedSiteId', 1),
        ('collectedUsername', '*****@*****.**'),
        ('processedSiteId', 1),
        ('processedUsername', '*****@*****.**'),
        ('finalizedSiteId', 2),
        ('finalizedUsername', '*****@*****.**'),
        ('identifiers', [BiobankOrderIdentifier(system='a', value='c')]),
        ('samples', [BiobankOrderedSample(
            biobankOrderId='1',
            test=BIOBANK_TESTS[0],
            description='description',
            finalized=TIME_1,
            processingRequired=True)])):
      if k not in kwargs:
        kwargs[k] = default_value
    return BiobankOrder(**kwargs)

  def _make_physical_measurements(self, **kwargs):
    """Makes a new PhysicalMeasurements (same values every time) with valid/complete defaults.

    Kwargs pass through to PM constructor, overriding defaults.
    """
    for k, default_value in (
        ('physicalMeasurementsId', 1),
        ('participantId', self.participant.participantId),
        ('resource', self.measurement_json),
        ('createdSiteId', 1),
        ('finalized', TIME_3),
        ('finalizedSiteId', 2)):
      if k not in kwargs:
        kwargs[k] = default_value
    return PhysicalMeasurements(**kwargs)
Example #5
class BiobankSamplesPipelineTest(CloudStorageSqlTestBase, NdbTestBase):
    def setUp(self):
        super(BiobankSamplesPipelineTest, self).setUp(use_mysql=True)
        NdbTestBase.doSetUp(self)
        TestBase.setup_fake(self)
        config.override_setting(config.BASELINE_SAMPLE_TEST_CODES,
                                _BASELINE_TESTS)
        # Everything is stored as a list, so override bucket name as a 1-element list.
        config.override_setting(config.BIOBANK_SAMPLES_BUCKET_NAME,
                                [_FAKE_BUCKET])
        self.participant_dao = ParticipantDao()
        self.summary_dao = ParticipantSummaryDao()

    def _write_cloud_csv(self, file_name, contents_str):
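        """Writes contents_str to the fake samples bucket the pipeline reads from."""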
        with cloudstorage_api.open('/%s/%s' % (_FAKE_BUCKET, file_name),
                                   mode='w') as cloud_file:
            cloud_file.write(contents_str.encode('utf-8'))

    def _make_biobank_order(self, **kwargs):
        """Makes a new BiobankOrder (same values every time) with valid/complete defaults.

    Kwargs pass through to BiobankOrder constructor, overriding defaults.
    """
        participantId = kwargs['participantId']
        modified = datetime.datetime(2019, 3, 25, 15, 59, 30)

        for k, default_value in (
            ('biobankOrderId', u'1'),
            ('created', clock.CLOCK.now()),
                # ('participantId', self.participant.participantId),
            ('sourceSiteId', 1),
            ('sourceUsername', u'*****@*****.**'),
            ('collectedSiteId', 1),
            ('collectedUsername', u'*****@*****.**'),
            ('processedSiteId', 1),
            ('processedUsername', u'*****@*****.**'),
            ('finalizedSiteId', 2),
            ('finalizedUsername', u'*****@*****.**'),
            ('version', 1),
            ('identifiers', [BiobankOrderIdentifier(system=u'a', value=u'c')]),
            ('samples', [
                BiobankOrderedSample(test=u'1SAL2',
                                     description=u'description',
                                     processingRequired=True)
            ]),
            ('dvOrders', [
                BiobankDVOrder(participantId=participantId,
                               modified=modified,
                               version=1)
            ])):
            if k not in kwargs:
                kwargs[k] = default_value
        return BiobankOrder(**kwargs)

    def test_dv_order_sample_update(self):
        """
    Test Biobank Direct Volunteer order
    """
        participant = self.participant_dao.insert(Participant())
        self.summary_dao.insert(self.participant_summary(participant))

        created_ts = datetime.datetime(2019, 3, 22, 18, 30, 45)
        confirmed_ts = datetime.datetime(2019, 3, 23, 12, 13, 0)

        bo = self._make_biobank_order(participantId=participant.participantId)
        BiobankOrderDao().insert(bo)

        boi = bo.identifiers[0]

        bss = BiobankStoredSample(biobankStoredSampleId=u'23523523',
                                  biobankId=participant.biobankId,
                                  test=u'1SAL2',
                                  created=created_ts,
                                  biobankOrderIdentifier=boi.value,
                                  confirmed=confirmed_ts)

        with self.participant_dao.session() as session:
            session.add(bss)

        ps = self.summary_dao.get(participant.participantId)
        self.assertIsNone(ps.sampleStatusDV1SAL2)
        self.assertIsNone(ps.sampleStatusDV1SAL2Time)

        self.summary_dao.update_from_biobank_stored_samples()
        ps = self.summary_dao.get(participant.participantId)
        self.assertEqual(ps.sampleStatus1SAL2, SampleStatus.RECEIVED)
        self.assertEqual(ps.sampleStatus1SAL2Time, confirmed_ts)

    def test_end_to_end(self):
        dao = BiobankStoredSampleDao()
        self.assertEquals(dao.count(), 0)

        # Create nids participants and pass their (random) IDs into sample rows.
        summary_dao = ParticipantSummaryDao()
        biobank_ids = []
        participant_ids = []
        nids = 16  # equal to the number of parent rows in 'biobank_samples_1.csv'
        cids = 1  # equal to the number of child rows in 'biobank_samples_1.csv'

        for _ in xrange(nids):
            participant = self.participant_dao.insert(Participant())
            summary_dao.insert(self.participant_summary(participant))
            participant_ids.append(participant.participantId)
            biobank_ids.append(participant.biobankId)
            self.assertEquals(
                summary_dao.get(
                    participant.participantId).numBaselineSamplesArrived, 0)

        test_codes = random.sample(_BASELINE_TESTS, nids)
        samples_file = test_data.open_biobank_samples(biobank_ids=biobank_ids,
                                                      tests=test_codes)
        lines = samples_file.split('\n')[1:]  # remove field name line

        input_filename = 'cloud%s.csv' % self._naive_utc_to_naive_central(
            clock.CLOCK.now()).strftime(
                biobank_samples_pipeline.INPUT_CSV_TIME_FORMAT)
        self._write_cloud_csv(input_filename, samples_file)
        biobank_samples_pipeline.upsert_from_latest_csv()

        self.assertEquals(dao.count(), nids - cids)

        for x in range(0, nids):
            cols = lines[x].split('\t')

            if cols[10].strip():  # skip child sample
                continue

            # If status is 'In Prep', then sample confirmed timestamp should be empty
            if cols[2] == 'In Prep':
                self.assertEquals(len(cols[11]), 0)
            else:
                status = SampleStatus.RECEIVED
                ts_str = cols[11]
                # DA-814 - Participant Summary test status should be: Unset, Received or Disposed only.
                # If sample is disposed, then check disposed timestamp, otherwise check confirmed timestamp.
                # DA-871 - Only check status is disposed when reason code is a bad disposal.
                if cols[2] == 'Disposed' and get_sample_status_enum_value(
                        cols[8]) > SampleStatus.UNKNOWN:
                    status = SampleStatus.DISPOSED
                    ts_str = cols[9]

                ts = datetime.datetime.strptime(ts_str, '%Y/%m/%d %H:%M:%S')
                self._check_summary(participant_ids[x], test_codes[x], ts,
                                    status)

    def test_old_csv_not_imported(self):
        now = clock.CLOCK.now()
        too_old_time = now - datetime.timedelta(hours=25)
        input_filename = 'cloud%s.csv' % self._naive_utc_to_naive_central(
            too_old_time).strftime(
                biobank_samples_pipeline.INPUT_CSV_TIME_FORMAT)
        self._write_cloud_csv(input_filename, '')
        with self.assertRaises(biobank_samples_pipeline.DataError):
            biobank_samples_pipeline.upsert_from_latest_csv()

    def _naive_utc_to_naive_central(self, naive_utc_date):
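        """Converts a naive UTC datetime to naive US/Central, matching the CSV timestamps."""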
        utc_date = pytz.utc.localize(naive_utc_date)
        central_date = utc_date.astimezone(pytz.timezone('US/Central'))
        return central_date.replace(tzinfo=None)

    def _check_summary(self, participant_id, test, date_formatted, status):
        summary = ParticipantSummaryDao().get(participant_id)
        self.assertEquals(summary.numBaselineSamplesArrived, 1)
        # DA-614 - All specific disposal statuses in biobank_stored_samples are changed to DISPOSED
        # in the participant summary.
        self.assertEquals(status, getattr(summary, 'sampleStatus' + test))
        sample_time = self._naive_utc_to_naive_central(
            getattr(summary, 'sampleStatus' + test + 'Time'))
        self.assertEquals(date_formatted, sample_time)

    def test_find_latest_csv(self):
        # The cloud storage testbed does not expose an injectable time function.
        # Creation time is stored at second granularity.
        self._write_cloud_csv('a_lex_first_created_first.csv', 'any contents')
        time.sleep(1.0)
        self._write_cloud_csv('z_lex_last_created_middle.csv', 'any contents')
        time.sleep(1.0)
        created_last = 'b_lex_middle_created_last.csv'
        self._write_cloud_csv(created_last, 'any contents')
        self._write_cloud_csv(
            '%s/created_last_in_subdir.csv' %
            biobank_samples_pipeline._REPORT_SUBDIR, 'any contents')

        latest_filename = biobank_samples_pipeline._find_latest_samples_csv(
            _FAKE_BUCKET)
        self.assertEquals(latest_filename,
                          '/%s/%s' % (_FAKE_BUCKET, created_last))

    def test_sample_from_row(self):
        samples_file = test_data.open_biobank_samples([112, 222, 333], [])
        reader = csv.DictReader(StringIO.StringIO(samples_file),
                                delimiter='\t')
        row = reader.next()
        sample = biobank_samples_pipeline._create_sample_from_row(
            row, get_biobank_id_prefix())
        self.assertIsNotNone(sample)

        cols = biobank_samples_pipeline.CsvColumns
        self.assertEquals(sample.biobankStoredSampleId, row[cols.SAMPLE_ID])
        self.assertEquals(to_client_biobank_id(sample.biobankId),
                          row[cols.EXTERNAL_PARTICIPANT_ID])
        self.assertEquals(sample.test, row[cols.TEST_CODE])
        confirmed_date = self._naive_utc_to_naive_central(sample.confirmed)
        self.assertEquals(
            confirmed_date.strftime(
                biobank_samples_pipeline._INPUT_TIMESTAMP_FORMAT),
            row[cols.CONFIRMED_DATE])
        received_date = self._naive_utc_to_naive_central(sample.created)
        self.assertEquals(
            received_date.strftime(
                biobank_samples_pipeline._INPUT_TIMESTAMP_FORMAT),
            row[cols.CREATE_DATE])

    def test_sample_from_row_wrong_prefix(self):
        samples_file = test_data.open_biobank_samples([111, 222, 333], [])
        reader = csv.DictReader(StringIO.StringIO(samples_file),
                                delimiter='\t')
        row = reader.next()
        row[biobank_samples_pipeline.CsvColumns.CONFIRMED_DATE] = '2016 11 19'
        self.assertIsNone(
            biobank_samples_pipeline._create_sample_from_row(row, 'Q'))

    def test_sample_from_row_invalid(self):
        samples_file = test_data.open_biobank_samples([111, 222, 333], [])
        reader = csv.DictReader(StringIO.StringIO(samples_file),
                                delimiter='\t')
        row = reader.next()
        row[biobank_samples_pipeline.CsvColumns.CONFIRMED_DATE] = '2016 11 19'
        with self.assertRaises(biobank_samples_pipeline.DataError):
            biobank_samples_pipeline._create_sample_from_row(
                row, get_biobank_id_prefix())

    def test_sample_from_row_old_test(self):
        samples_file = test_data.open_biobank_samples([111, 222, 333], [])
        reader = csv.DictReader(StringIO.StringIO(samples_file),
                                delimiter='\t')
        row = reader.next()
        row[biobank_samples_pipeline.CsvColumns.TEST_CODE] = '2PST8'
        sample = biobank_samples_pipeline._create_sample_from_row(
            row, get_biobank_id_prefix())
        self.assertIsNotNone(sample)
        cols = biobank_samples_pipeline.CsvColumns
        self.assertEquals(sample.biobankStoredSampleId, row[cols.SAMPLE_ID])
        self.assertEquals(sample.test, row[cols.TEST_CODE])

    def test_column_missing(self):
        with open(test_data.data_path(
                'biobank_samples_missing_field.csv')) as samples_file:
            reader = csv.DictReader(samples_file, delimiter='\t')
            with self.assertRaises(biobank_samples_pipeline.DataError):
                biobank_samples_pipeline._upsert_samples_from_csv(reader)

    def test_get_reconciliation_report_paths(self):
        dt = datetime.datetime(2016, 12, 22, 18, 30, 45)
        expected_prefix = 'reconciliation/report_2016-12-22'
        paths = biobank_samples_pipeline._get_report_paths(dt)
        self.assertEquals(len(paths), 4)
        for path in paths:
            self.assertTrue(
                path.startswith(expected_prefix),
                'Report path %r must start with %r.' % (path, expected_prefix))
            self.assertTrue(path.endswith('.csv'))
Exemple #6
class GenomicSetValidationBaseTestCase(SqlTestBase):
    def setUp(self, with_data=True, use_mysql=False):
        super(GenomicSetValidationBaseTestCase,
              self).setUp(with_data=with_data, use_mysql=use_mysql)
        self.participant_dao = ParticipantDao()
        self.summary_dao = ParticipantSummaryDao()
        self.genomic_set_dao = GenomicSetDao()
        self.genomic_member_dao = GenomicSetMemberDao()
        self._participant_i = 0
        self.setup_data()

    def setup_data(self):
        pass

    def make_participant(self, **kwargs):
        """
    Make a participant with custom settings.
    default should create a valid participant.
    """
        i = self._participant_i
        self._participant_i += 1
        participant = Participant(participantId=i, biobankId=i, **kwargs)
        self.participant_dao.insert(participant)
        return participant

    def make_summary(self, participant, **override_kwargs):
        """
    Make a summary with custom settings.
    default should create a valid summary.
    """
        valid_kwargs = dict(participantId=participant.participantId,
                            biobankId=participant.biobankId,
                            withdrawalStatus=participant.withdrawalStatus,
                            dateOfBirth=datetime.datetime(2000, 1, 1),
                            firstName='foo',
                            lastName='bar',
                            zipCode='12345',
                            sampleStatus1ED04=SampleStatus.RECEIVED,
                            sampleStatus1SAL2=SampleStatus.RECEIVED,
                            samplesToIsolateDNA=SampleStatus.RECEIVED,
                            consentForStudyEnrollmentTime=datetime.datetime(
                                2019, 1, 1))
        kwargs = dict(valid_kwargs, **override_kwargs)
        summary = self._participant_summary_with_defaults(**kwargs)
        self.summary_dao.insert(summary)
        return summary

    def make_genomic_set(self, **override_kwargs):
        """
    Make a genomic set with custom settings.
    default should create a valid set.
    """
        valid_kwargs = dict(genomicSetName='foo',
                            genomicSetCriteria='something',
                            genomicSetVersion=1,
                            genomicSetStatus=GenomicSetStatus.UNSET)
        kwargs = dict(valid_kwargs, **override_kwargs)
        genomic_set = GenomicSet(**kwargs)
        self.genomic_set_dao.insert(genomic_set)
        return genomic_set

    def make_genomic_member(self, genomic_set, participant, **override_kwargs):
        """
    Make a genomic member with custom settings.
    default should create a valid member.
    """
        valid_kwargs = dict(genomicSetId=genomic_set.id,
                            participantId=participant.participantId,
                            sexAtBirth='F',
                            biobankId=participant.biobankId,
                            biobankOrderClientId='12345678')
        kwargs = dict(valid_kwargs, **override_kwargs)
        member = GenomicSetMember(**kwargs)
        self.genomic_member_dao.insert(member)
        return member
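
    # A minimal usage sketch (hypothetical subclass, not part of the original
    # example): concrete test cases override setup_data() and compose the
    # factories above.
    #
    #   class ExampleGenomicValidationTest(GenomicSetValidationBaseTestCase):
    #       def setup_data(self):
    #           participant = self.make_participant()
    #           self.make_summary(participant)
    #           genomic_set = self.make_genomic_set()
    #           self.make_genomic_member(genomic_set, participant)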
Exemple #7
class PhysicalMeasurementsDaoTest(SqlTestBase):
  def setUp(self):
    super(PhysicalMeasurementsDaoTest, self).setUp()
    self.participant = Participant(participantId=1, biobankId=2)
    ParticipantDao().insert(self.participant)
    self.dao = PhysicalMeasurementsDao()
    self.participant_summary_dao = ParticipantSummaryDao()
    self.measurement_json = json.dumps(load_measurement_json(self.participant.participantId,
                                                             TIME_1.isoformat()))
    self.biobank = BiobankOrderDao()

  def test_from_client_json(self):
    measurement = PhysicalMeasurementsDao.from_client_json(json.loads(self.measurement_json))
    self.assertIsNotNone(measurement.createdSiteId)
    self.assertIsNotNone(measurement.finalizedSiteId)

  def _make_physical_measurements(self, **kwargs):
    """Makes a new PhysicalMeasurements (same values every time) with valid/complete defaults.

    Kwargs pass through to PM constructor, overriding defaults.
    """
    for k, default_value in (
        ('physicalMeasurementsId', 1),
        ('participantId', self.participant.participantId),
        ('resource', self.measurement_json),
        ('createdSiteId', 1),
        ('finalizedSiteId', 2)):
      if k not in kwargs:
        kwargs[k] = default_value
    return PhysicalMeasurements(**kwargs)

  def testInsert_noParticipantId(self):
    with self.assertRaises(BadRequest):
      self.dao.insert(self._make_physical_measurements(participantId=None))

  def testInsert_wrongParticipantId(self):
    with self.assertRaises(BadRequest):
      self.dao.insert(self._make_physical_measurements(participantId=2))

  def _with_id(self, resource, id_):
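    """Return the serialized measurements resource with its 'id' set to id_."""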
    measurements_json = json.loads(resource)
    measurements_json['id'] = id_
    return json.dumps(measurements_json)

  def testInsert_noSummary(self):
    with self.assertRaises(BadRequest):
      self.dao.insert(self._make_physical_measurements())

  def _make_summary(self):
    self.participant_summary_dao.insert(self.participant_summary(self.participant))

  def testInsert_rightParticipantId(self):
    self._make_summary()
    summary = ParticipantSummaryDao().get(self.participant.participantId)
    self.assertIsNone(summary.physicalMeasurementsStatus)
    with FakeClock(TIME_2):
      measurements = self.dao.insert(self._make_physical_measurements())

    expected_measurements = PhysicalMeasurements(
        physicalMeasurementsId=1,
        participantId=self.participant.participantId,
        resource=self._with_id(self.measurement_json, '1'),
        created=TIME_2,
        finalized=TIME_1,
        final=True,
        logPositionId=1,
        createdSiteId=1,
        finalizedSiteId=2)
    self.assertEquals(expected_measurements.asdict(), measurements.asdict())
    measurements = self.dao.get(measurements.physicalMeasurementsId)
    self.assertEquals(expected_measurements.asdict(), measurements.asdict())
    # Completing physical measurements changes the participant summary status
    summary = ParticipantSummaryDao().get(self.participant.participantId)
    self.assertEquals(PhysicalMeasurementsStatus.COMPLETED, summary.physicalMeasurementsStatus)
    self.assertEquals(TIME_2, summary.physicalMeasurementsTime)
    self.assertEquals(TIME_2, summary.lastModified)

  def test_backfill_is_noop(self):
    self._make_summary()
    measurements_id = self.dao.insert(self._make_physical_measurements()).physicalMeasurementsId
    orig_measurements = self.dao.get_with_children(measurements_id).asdict()
    self.dao.backfill_measurements()
    backfilled_measurements = self.dao.get_with_children(measurements_id).asdict()
    # Formatting of resource gets changed, so test it separately as parsed JSON.
    self.assertEquals(
        json.loads(orig_measurements['resource']),
        json.loads(backfilled_measurements['resource']))
    del orig_measurements['resource']
    del backfilled_measurements['resource']
    self.assertEquals(orig_measurements, backfilled_measurements)

  def testInsert_withdrawnParticipantFails(self):
    self.participant.withdrawalStatus = WithdrawalStatus.NO_USE
    ParticipantDao().update(self.participant)
    self._make_summary()
    summary = ParticipantSummaryDao().get(self.participant.participantId)
    self.assertIsNone(summary.physicalMeasurementsStatus)
    with self.assertRaises(Forbidden):
      self.dao.insert(self._make_physical_measurements())

  def testInsert_getFailsForWithdrawnParticipant(self):
    self._make_summary()
    self.dao.insert(self._make_physical_measurements())
    self.participant.version += 1
    self.participant.withdrawalStatus = WithdrawalStatus.NO_USE
    ParticipantDao().update(self.participant)
    with self.assertRaises(Forbidden):
      self.dao.get(1)
    with self.assertRaises(Forbidden):
      self.dao.query(Query([FieldFilter('participantId', Operator.EQUALS,
                                        self.participant.participantId)],
                           None, 10, None))

  def testInsert_duplicate(self):
    self._make_summary()
    with FakeClock(TIME_2):
      measurements = self.dao.insert(self._make_physical_measurements())
    with FakeClock(TIME_3):
      measurements_2 = self.dao.insert(self._make_physical_measurements())
    self.assertEquals(measurements.asdict(), measurements_2.asdict())

  def testInsert_amend(self):
    self._make_summary()
    with FakeClock(TIME_2):
      measurements = self.dao.insert(self._make_physical_measurements(
          physicalMeasurementsId=1))

    amendment_json = load_measurement_json_amendment(self.participant.participantId,
                                                     measurements.physicalMeasurementsId,
                                                     TIME_2)
    with FakeClock(TIME_3):
      new_measurements = self.dao.insert(self._make_physical_measurements(
          physicalMeasurementsId=2, resource=json.dumps(amendment_json)))

    measurements = self.dao.get(measurements.physicalMeasurementsId)
    amended_json = json.loads(measurements.resource)
    self.assertEquals('amended', amended_json['entry'][0]['resource']['status'])
    self.assertEquals('1', amended_json['id'])

    amendment_json = json.loads(new_measurements.resource)
    self.assertEquals('2', amendment_json['id'])
    self.assertTrue(new_measurements.final)
    self.assertEquals(TIME_3, new_measurements.created)
class ParticipantSummaryDaoTest(NdbTestBase):
    def setUp(self):
        super(ParticipantSummaryDaoTest, self).setUp(use_mysql=True)
        self.dao = ParticipantSummaryDao()
        self.participant_dao = ParticipantDao()
        self.no_filter_query = Query([], None, 2, None)
        self.one_filter_query = Query(
            [FieldFilter("participantId", Operator.EQUALS, 1)], None, 2, None)
        self.two_filter_query = Query([
            FieldFilter("participantId", Operator.EQUALS, 1),
            FieldFilter("hpoId", Operator.EQUALS, PITT_HPO_ID)
        ], None, 2, None)
        self.ascending_biobank_id_query = Query([], OrderBy("biobankId", True),
                                                2, None)
        self.descending_biobank_id_query = Query([],
                                                 OrderBy("biobankId",
                                                         False), 2, None)
        self.enrollment_status_order_query = Query([],
                                                   OrderBy(
                                                       "enrollmentStatus",
                                                       True), 2, None)
        self.hpo_id_order_query = Query([], OrderBy("hpoId", True), 2, None)
        self.first_name_order_query = Query([], OrderBy("firstName", True), 2,
                                            None)

    def assert_no_results(self, query):
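        """Assert that the query returns no items and no pagination token."""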
        results = self.dao.query(query)
        self.assertEquals([], results.items)
        self.assertIsNone(results.pagination_token)

    def assert_results(self, query, items, pagination_token=None):
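        """Assert that the query returns the given items and pagination token."""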
        results = self.dao.query(query)
        self.assertListAsDictEquals(items, results.items)
        self.assertEquals(
            pagination_token, results.pagination_token,
            "Pagination tokens don't match; decoded = %s, %s" %
            (_decode_token(pagination_token),
             _decode_token(results.pagination_token)))

    def test_query_with_total(self):
        num_participants = 5
        query = Query([], None, 10, None, include_total=True)
        results = self.dao.query(query)
        self.assertEqual(results.total, 0)
        for i in range(num_participants):
            participant = Participant(participantId=i, biobankId=i)
            self._insert(participant)
        results = self.dao.query(query)
        self.assertEqual(results.total, num_participants)

    def testQuery_noSummaries(self):
        self.assert_no_results(self.no_filter_query)
        self.assert_no_results(self.one_filter_query)
        self.assert_no_results(self.two_filter_query)
        self.assert_no_results(self.ascending_biobank_id_query)
        self.assert_no_results(self.descending_biobank_id_query)

    def _insert(self, participant, first_name=None, last_name=None):
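        """Insert a participant and its summary, optionally overriding the summary's name."""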
        self.participant_dao.insert(participant)
        summary = self.participant_summary(participant)
        if first_name:
            summary.firstName = first_name
        if last_name:
            summary.lastName = last_name
        self.dao.insert(summary)
        return participant

    def testQuery_oneSummary(self):
        participant = Participant(participantId=1, biobankId=2)
        self._insert(participant)
        summary = self.dao.get(1)
        self.assert_results(self.no_filter_query, [summary])
        self.assert_results(self.one_filter_query, [summary])
        self.assert_no_results(self.two_filter_query)
        self.assert_results(self.ascending_biobank_id_query, [summary])
        self.assert_results(self.descending_biobank_id_query, [summary])

    def testUnicodeNameRoundTrip(self):
        name = self.fake.first_name()
        with self.assertRaises(UnicodeEncodeError):
            str(name)  # sanity check that the name contains non-ASCII
        participant = self._insert(Participant(participantId=1, biobankId=2))
        summary = self.dao.get(participant.participantId)
        summary.firstName = name
        self.dao.update(summary)
        fetched_summary = self.dao.get(participant.participantId)
        self.assertEquals(name, fetched_summary.firstName)

    def testQuery_twoSummaries(self):
        participant_1 = Participant(participantId=1, biobankId=2)
        self._insert(participant_1, 'Alice', 'Smith')
        participant_2 = Participant(participantId=2, biobankId=1)
        self._insert(participant_2, 'Zed', 'Zebra')
        ps_1 = self.dao.get(1)
        ps_2 = self.dao.get(2)
        self.assert_results(self.no_filter_query, [ps_1, ps_2])
        self.assert_results(self.one_filter_query, [ps_1])
        self.assert_no_results(self.two_filter_query)
        self.assert_results(self.ascending_biobank_id_query, [ps_2, ps_1])
        self.assert_results(self.descending_biobank_id_query, [ps_1, ps_2])

    def testQuery_threeSummaries_paginate(self):
        participant_1 = Participant(participantId=1, biobankId=4)
        self._insert(participant_1, 'Alice', 'Aardvark')
        participant_2 = Participant(participantId=2, biobankId=1)
        self._insert(participant_2, 'Bob', 'Builder')
        participant_3 = Participant(participantId=3, biobankId=3)
        self._insert(participant_3, 'Chad', 'Caterpillar')
        ps_1 = self.dao.get(1)
        ps_2 = self.dao.get(2)
        ps_3 = self.dao.get(3)
        self.assert_results(
            self.no_filter_query, [ps_1, ps_2],
            _make_pagination_token(['Builder', 'Bob', None, 2]))
        self.assert_results(self.one_filter_query, [ps_1])
        self.assert_no_results(self.two_filter_query)
        self.assert_results(
            self.ascending_biobank_id_query, [ps_2, ps_3],
            _make_pagination_token([3, 'Caterpillar', 'Chad', None, 3]))
        self.assert_results(
            self.descending_biobank_id_query, [ps_1, ps_3],
            _make_pagination_token([3, 'Caterpillar', 'Chad', None, 3]))

        self.assert_results(
            _with_token(self.no_filter_query,
                        _make_pagination_token(['Builder', 'Bob', None, 2])),
            [ps_3])
        self.assert_results(
            _with_token(
                self.ascending_biobank_id_query,
                _make_pagination_token([3, 'Caterpillar', 'Chad', None, 3])),
            [ps_1])
        self.assert_results(
            _with_token(
                self.descending_biobank_id_query,
                _make_pagination_token([3, 'Caterpillar', 'Chad', None, 3])),
            [ps_2])

    def testQuery_fourFullSummaries_paginate(self):
        participant_1 = Participant(participantId=1, biobankId=4)
        self._insert(participant_1, 'Bob', 'Jones')
        participant_2 = Participant(participantId=2, biobankId=1)
        self._insert(participant_2, 'Bob', 'Jones')
        participant_3 = Participant(participantId=3, biobankId=3)
        self._insert(participant_3, 'Bob', 'Jones')
        participant_4 = Participant(participantId=4, biobankId=2)
        self._insert(participant_4, 'Bob', 'Jones')
        ps_1 = self.dao.get(1)
        ps_2 = self.dao.get(2)
        ps_3 = self.dao.get(3)
        ps_4 = self.dao.get(4)

        ps_1.lastName = 'Jones'
        ps_1.firstName = 'Bob'
        ps_1.dateOfBirth = datetime.date(1978, 10, 9)
        ps_1.hpoId = PITT_HPO_ID
        self.dao.update(ps_1)

        ps_2.lastName = 'Aardvark'
        ps_2.firstName = 'Bob'
        ps_2.dateOfBirth = datetime.date(1978, 10, 10)
        ps_2.enrollmentStatus = EnrollmentStatus.MEMBER
        self.dao.update(ps_2)

        ps_3.lastName = 'Jones'
        ps_3.firstName = 'Bob'
        ps_3.dateOfBirth = datetime.date(1978, 10, 10)
        ps_3.hpoId = PITT_HPO_ID
        ps_3.enrollmentStatus = EnrollmentStatus.MEMBER
        self.dao.update(ps_3)

        ps_4.lastName = 'Jones'
        ps_4.enrollmentStatus = EnrollmentStatus.FULL_PARTICIPANT
        self.dao.update(ps_4)

        self.assert_results(self.no_filter_query, [ps_2, ps_4],
                            _make_pagination_token(['Jones', 'Bob', None, 4]))
        self.assert_results(self.one_filter_query, [ps_1])
        self.assert_results(self.two_filter_query, [ps_1])
        self.assert_results(
            self.ascending_biobank_id_query, [ps_2, ps_4],
            _make_pagination_token([2, 'Jones', 'Bob', None, 4]))
        self.assert_results(
            self.descending_biobank_id_query, [ps_1, ps_3],
            _make_pagination_token(
                [3, 'Jones', 'Bob',
                 datetime.date(1978, 10, 10), 3]))
        self.assert_results(
            self.hpo_id_order_query, [ps_2, ps_4],
            _make_pagination_token([0, 'Jones', 'Bob', None, 4]))
        self.assert_results(
            self.enrollment_status_order_query, [ps_1, ps_2],
            _make_pagination_token(
                ['MEMBER', 'Aardvark', 'Bob',
                 datetime.date(1978, 10, 10), 2]))

        self.assert_results(
            _with_token(self.no_filter_query,
                        _make_pagination_token(['Jones', 'Bob', None, 4])),
            [ps_1, ps_3])
        self.assert_results(
            _with_token(self.ascending_biobank_id_query,
                        _make_pagination_token([2, 'Jones', 'Bob', None, 4])),
            [ps_3, ps_1])
        self.assert_results(
            _with_token(
                self.descending_biobank_id_query,
                _make_pagination_token(
                    [3, 'Jones', 'Bob',
                     datetime.date(1978, 10, 10), 3])), [ps_4, ps_2])
        self.assert_results(
            _with_token(self.hpo_id_order_query,
                        _make_pagination_token([0, 'Jones', 'Bob', None, 4])),
            [ps_1, ps_3])
        self.assert_results(
            _with_token(
                self.enrollment_status_order_query,
                _make_pagination_token([
                    'MEMBER', 'Aardvark', 'Bob',
                    datetime.date(1978, 10, 10), 2
                ])), [ps_3, ps_4])

    def test_update_from_samples(self):
        # baseline_tests = ['BASELINE1', 'BASELINE2']
        baseline_tests = ["1PST8", "2PST8"]

        config.override_setting(config.BASELINE_SAMPLE_TEST_CODES,
                                baseline_tests)
        self.dao.update_from_biobank_stored_samples()  # safe noop

        p_baseline_samples = self._insert(
            Participant(participantId=1, biobankId=11))
        p_mixed_samples = self._insert(
            Participant(participantId=2, biobankId=22))
        p_no_samples = self._insert(Participant(participantId=3, biobankId=33))
        p_unconfirmed = self._insert(Participant(participantId=4,
                                                 biobankId=44))
        self.assertEquals(
            self.dao.get(
                p_baseline_samples.participantId).numBaselineSamplesArrived, 0)

        def get_p_baseline_last_modified():
            return self.dao.get(p_baseline_samples.participantId).lastModified

        p_baseline_last_modified1 = get_p_baseline_last_modified()

        sample_dao = BiobankStoredSampleDao()

        def add_sample(participant, test_code, sample_id):
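            """Insert a stored sample for the participant, confirmed on a fixed date."""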
            TIME = datetime.datetime(2018, 3, 2)
            sample_dao.insert(
                BiobankStoredSample(biobankStoredSampleId=sample_id,
                                    biobankId=participant.biobankId,
                                    biobankOrderIdentifier='KIT',
                                    test=test_code,
                                    confirmed=TIME))

        add_sample(p_baseline_samples, baseline_tests[0], '11111')
        add_sample(p_baseline_samples, baseline_tests[1], '22223')
        add_sample(p_mixed_samples, baseline_tests[0], '11112')
        add_sample(p_mixed_samples, 'NOT1', '44441')
        # add unconfirmed sample
        sample_dao.insert(
            BiobankStoredSample(biobankStoredSampleId=55555,
                                biobankId=p_unconfirmed.biobankId,
                                biobankOrderIdentifier='KIT',
                                test=baseline_tests[1],
                                confirmed=None))
        # sleep 1 sec to make lastModified different
        time.sleep(1)
        self.dao.update_from_biobank_stored_samples()

        p_baseline_last_modified2 = get_p_baseline_last_modified()
        self.assertNotEquals(p_baseline_last_modified2,
                             p_baseline_last_modified1)

        self.assertEquals(
            self.dao.get(
                p_baseline_samples.participantId).numBaselineSamplesArrived, 2)
        self.assertEquals(
            self.dao.get(
                p_mixed_samples.participantId).numBaselineSamplesArrived, 1)
        self.assertEquals(
            self.dao.get(p_no_samples.participantId).numBaselineSamplesArrived,
            0)
        self.assertEquals(
            self.dao.get(
                p_unconfirmed.participantId).numBaselineSamplesArrived, 0)

        M_baseline_samples = self._insert(
            Participant(participantId=9, biobankId=99))
        add_sample(M_baseline_samples, baseline_tests[0], '999')
        M_first_update = self.dao.get(M_baseline_samples.participantId)
        # sleep 1 sec to make lastModified different
        time.sleep(1)
        self.dao.update_from_biobank_stored_samples()
        add_sample(M_baseline_samples, baseline_tests[1], '9999')
        M_second_update = self.dao.get(M_baseline_samples.participantId)
        # sleep 1 sec to make lastModified different
        time.sleep(1)
        self.dao.update_from_biobank_stored_samples()

        self.assertNotEqual(M_first_update.lastModified,
                            M_second_update.lastModified)
        self.assertEquals(get_p_baseline_last_modified(),
                          p_baseline_last_modified2)

    def test_update_from_samples_changed_tests(self):
        baseline_tests = ["1PST8", "2PST8"]
        config.override_setting(config.BASELINE_SAMPLE_TEST_CODES,
                                baseline_tests)
        self.dao.update_from_biobank_stored_samples()  # safe noop

        participant = self._insert(Participant(participantId=1, biobankId=11))
        self.assertEquals(
            self.dao.get(participant.participantId).numBaselineSamplesArrived,
            0)

        sample_dao = BiobankStoredSampleDao()

        def add_sample(test_code, sample_id):
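            """Insert a stored sample for the test participant, confirmed on a fixed date."""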
            TIME = datetime.datetime(2018, 3, 2)
            sample_dao.insert(
                BiobankStoredSample(biobankStoredSampleId=sample_id,
                                    biobankId=participant.biobankId,
                                    biobankOrderIdentifier='KIT',
                                    test=test_code,
                                    confirmed=TIME))

        add_sample(baseline_tests[0], '11111')
        add_sample(baseline_tests[1], '22223')
        self.dao.update_from_biobank_stored_samples()
        summary = self.dao.get(participant.participantId)
        init_last_modified = summary.lastModified
        self.assertEquals(summary.numBaselineSamplesArrived, 2)
        # sleep 1 sec to make lastModified different
        time.sleep(1)
        # Simulate removal of one of the baseline tests from config.json.
        baseline_tests.pop()
        config.override_setting(config.BASELINE_SAMPLE_TEST_CODES,
                                baseline_tests)
        self.dao.update_from_biobank_stored_samples()

        summary = self.dao.get(participant.participantId)
        self.assertEquals(summary.numBaselineSamplesArrived, 1)
        self.assertNotEqual(init_last_modified, summary.lastModified)

    def test_only_update_dna_sample(self):
        dna_tests = ["1ED10", "1SAL2"]

        config.override_setting(config.DNA_SAMPLE_TEST_CODES, dna_tests)
        self.dao.update_from_biobank_stored_samples()  # safe noop

        p_dna_samples = self._insert(Participant(participantId=1,
                                                 biobankId=11))

        self.assertEquals(
            self.dao.get(p_dna_samples.participantId).samplesToIsolateDNA,
            None)
        self.assertEquals(
            self.dao.get(p_dna_samples.participantId).
            enrollmentStatusCoreStoredSampleTime, None)
        self.assertEquals(
            self.dao.get(p_dna_samples.participantId).
            enrollmentStatusCoreOrderedSampleTime, None)

        sample_dao = BiobankStoredSampleDao()

        def add_sample(participant, test_code, sample_id, confirmed_time):
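            """Insert a stored sample for the participant with the given confirmed time."""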
            sample_dao.insert(
                BiobankStoredSample(biobankStoredSampleId=sample_id,
                                    biobankId=participant.biobankId,
                                    biobankOrderIdentifier='KIT',
                                    test=test_code,
                                    confirmed=confirmed_time))

        confirmed_time_0 = datetime.datetime(2018, 3, 1)
        add_sample(p_dna_samples, dna_tests[0], '11111', confirmed_time_0)

        self.dao.update_from_biobank_stored_samples()

        self.assertEquals(
            self.dao.get(p_dna_samples.participantId).samplesToIsolateDNA,
            SampleStatus.RECEIVED)
        # Updating only a DNA sample does not set enrollmentStatusCoreStoredSampleTime.
        self.assertEquals(
            self.dao.get(p_dna_samples.participantId).
            enrollmentStatusCoreStoredSampleTime, None)
        self.assertEquals(
            self.dao.get(p_dna_samples.participantId).
            enrollmentStatusCoreOrderedSampleTime, None)

    def test_calculate_enrollment_status(self):
        self.assertEquals(
            EnrollmentStatus.FULL_PARTICIPANT,
            self.dao.calculate_enrollment_status(
                True, NUM_BASELINE_PPI_MODULES,
                PhysicalMeasurementsStatus.COMPLETED, SampleStatus.RECEIVED))
        self.assertEquals(
            EnrollmentStatus.MEMBER,
            self.dao.calculate_enrollment_status(
                True, NUM_BASELINE_PPI_MODULES - 1,
                PhysicalMeasurementsStatus.COMPLETED, SampleStatus.RECEIVED))
        self.assertEquals(
            EnrollmentStatus.MEMBER,
            self.dao.calculate_enrollment_status(
                True, NUM_BASELINE_PPI_MODULES,
                PhysicalMeasurementsStatus.UNSET, SampleStatus.RECEIVED))
        self.assertEquals(
            EnrollmentStatus.MEMBER,
            self.dao.calculate_enrollment_status(
                True, NUM_BASELINE_PPI_MODULES,
                PhysicalMeasurementsStatus.COMPLETED, SampleStatus.UNSET))
        self.assertEquals(
            EnrollmentStatus.INTERESTED,
            self.dao.calculate_enrollment_status(
                False, NUM_BASELINE_PPI_MODULES,
                PhysicalMeasurementsStatus.COMPLETED, SampleStatus.RECEIVED))

    def testUpdateEnrollmentStatus(self):
        ehr_consent_time = datetime.datetime(2018, 3, 1)
        summary = ParticipantSummary(
            participantId=1,
            biobankId=2,
            consentForStudyEnrollment=QuestionnaireStatus.SUBMITTED,
            consentForElectronicHealthRecords=QuestionnaireStatus.SUBMITTED,
            consentForElectronicHealthRecordsTime=ehr_consent_time,
            enrollmentStatus=EnrollmentStatus.INTERESTED)
        self.dao.update_enrollment_status(summary)
        self.assertEquals(EnrollmentStatus.MEMBER, summary.enrollmentStatus)
        self.assertEquals(ehr_consent_time, summary.enrollmentStatusMemberTime)
Exemple #9
class BiobankOrderApiTest(FlaskTestBase):
  def setUp(self):
    super(BiobankOrderApiTest, self).setUp()
    self.participant = Participant(participantId=123, biobankId=555)
    self.participant_dao = ParticipantDao()
    self.participant_dao.insert(self.participant)
    self.summary_dao = ParticipantSummaryDao()
    self.path = (
        'Participant/%s/BiobankOrder' % to_client_participant_id(self.participant.participantId))

  def test_insert_and_refetch(self):
    self.summary_dao.insert(self.participant_summary(self.participant))
    self.create_and_verify_created_obj(
        self.path, load_biobank_order_json(self.participant.participantId))

  def test_insert_new_order(self):
    self.summary_dao.insert(self.participant_summary(self.participant))
    order_json = load_biobank_order_json(self.participant.participantId,
                                         filename='biobank_order_2.json')
    result = self.send_post(self.path, order_json)
    full_order_json = load_biobank_order_json(self.participant.participantId,
                                              filename='biobank_order_1.json')
    _strip_fields(result)
    _strip_fields(full_order_json)
    self.assertEquals(full_order_json, result)

  def test_error_no_summary(self):
    order_json = load_biobank_order_json(self.participant.participantId)
    self.send_post(self.path, order_json, expected_status=httplib.BAD_REQUEST)

  def test_error_missing_required_fields(self):
    order_json = load_biobank_order_json(self.participant.participantId)
    del order_json['identifier']
    self.send_post(self.path, order_json, expected_status=httplib.BAD_REQUEST)

  def test_no_duplicate_test_within_order(self):
    order_json = load_biobank_order_json(self.participant.participantId)
    order_json['samples'].extend(list(order_json['samples']))
    self.send_post(self.path, order_json, expected_status=httplib.BAD_REQUEST)

  def test_auto_pair_updates_participant_and_summary(self):
    self.summary_dao.insert(self.participant_summary(self.participant))

    # Sanity check: No HPO yet.
    p_unpaired = self.participant_dao.get(self.participant.participantId)
    self.assertEquals(p_unpaired.hpoId, UNSET_HPO_ID)
    self.assertIsNone(p_unpaired.providerLink)
    s_unpaired = self.summary_dao.get(self.participant.participantId)
    self.assertEquals(s_unpaired.hpoId, UNSET_HPO_ID)

    self.send_post(self.path, load_biobank_order_json(self.participant.participantId))

    # Some HPO has been set. (ParticipantDao tests cover more detailed cases / specific values.)
    p_paired = self.participant_dao.get(self.participant.participantId)
    self.assertNotEqual(p_paired.hpoId, UNSET_HPO_ID)
    self.assertIsNotNone(p_paired.providerLink)

    s_paired = self.summary_dao.get(self.participant.participantId)

    self.assertNotEqual(s_paired.hpoId, UNSET_HPO_ID)
    self.assertEqual(s_paired.biospecimenCollectedSiteId, s_paired.siteId)
    self.assertNotEqual(s_paired.biospecimenCollectedSiteId, s_paired.biospecimenFinalizedSiteId)

    self.assertNotEqual(s_paired.siteId, s_paired.physicalMeasurementsCreatedSiteId)
    self.assertNotEqual(s_paired.siteId, s_paired.physicalMeasurementsFinalizedSiteId)

  def test_not_pairing_at_pm_when_has_bio(self):
    self.participant_id = self.create_participant()
    _id = int(self.participant_id[1:])
    self.path = (
      'Participant/%s/BiobankOrder' % to_client_participant_id(_id))
    pid_numeric = from_client_participant_id(self.participant_id)
    self.send_consent(self.participant_id)
    self.send_post(self.path, load_biobank_order_json(pid_numeric))
    participant_paired = self.summary_dao.get(pid_numeric)

    self.assertEqual(participant_paired.siteId, participant_paired.biospecimenCollectedSiteId)
    self.path = (
      'Participant/%s/PhysicalMeasurements' % to_client_participant_id(pid_numeric))
    self._insert_measurements(datetime.datetime.utcnow().isoformat())
    self.assertNotEqual(participant_paired.siteId,
                        participant_paired.physicalMeasurementsFinalizedSiteId)

  def _insert_measurements(self, now=None):
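    """POST a physical measurements resource for the current participant."""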
    measurements_1 = load_measurement_json(self.participant_id, now)
    path_1 = 'Participant/%s/PhysicalMeasurements' % self.participant_id
    self.send_post(path_1, measurements_1)
Exemple #10
class MySqlReconciliationTest(FlaskTestBase):
    """Biobank samples pipeline tests requiring slower MySQL (not SQLite)."""
    def setUp(self):
        super(MySqlReconciliationTest, self).setUp(use_mysql=True)
        self.participant_dao = ParticipantDao()
        self.summary_dao = ParticipantSummaryDao()
        self.order_dao = BiobankOrderDao()
        self.sample_dao = BiobankStoredSampleDao()

    def _withdraw(self, participant, withdrawal_time):
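        """Mark the participant as withdrawn (NO_USE) at the given time."""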
        with FakeClock(withdrawal_time):
            participant.withdrawalStatus = WithdrawalStatus.NO_USE
            self.participant_dao.update(participant)

    def _insert_participant(self, race_codes=None):
        participant = self.participant_dao.insert(Participant())
        # satisfies the consent requirement
        self.summary_dao.insert(self.participant_summary(participant))

        if race_codes:
            self._submit_race_questionnaire_response(
                to_client_participant_id(participant.participantId),
                race_codes)
        return participant

    def _insert_order(self,
                      participant,
                      order_id,
                      tests,
                      order_time,
                      finalized_tests=None,
                      kit_id=None,
                      tracking_number=None):
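        """Insert a BiobankOrder with standard identifiers and one ordered sample per test code.

        When finalized_tests is given, tests not listed there are left unfinalized.
        """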
        order = BiobankOrder(biobankOrderId=order_id,
                             participantId=participant.participantId,
                             sourceSiteId=1,
                             finalizedSiteId=1,
                             finalizedUsername='******',
                             created=order_time,
                             samples=[])
        id_1 = BiobankOrderIdentifier(
            system="https://orders.mayomedicallaboratories.com",
            value=order_id)
        id_2 = BiobankOrderIdentifier(system="https://www.pmi-ops.org",
                                      value='O%s' % order_id)
        order.identifiers.append(id_1)
        order.identifiers.append(id_2)
        if kit_id:
            order.identifiers.append(
                BiobankOrderIdentifier(system=_KIT_ID_SYSTEM, value=kit_id))
        if tracking_number:
            order.identifiers.append(
                BiobankOrderIdentifier(system=_TRACKING_NUMBER_SYSTEM,
                                       value=tracking_number))
        for test_code in tests:
            finalized_time = order_time
            if finalized_tests and not test_code in finalized_tests:
                finalized_time = None
            order.samples.append(
                BiobankOrderedSample(biobankOrderId=order.biobankOrderId,
                                     test=test_code,
                                     description=u'test',
                                     processingRequired=False,
                                     collected=order_time,
                                     processed=order_time,
                                     finalized=finalized_time))
        return self.order_dao.insert(order)

    def _insert_samples(self, participant, tests, sample_ids, confirmed_time,
                        created_time):
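        """Insert a BiobankStoredSample for each (test, sample_id) pair with the given timestamps."""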
        for test_code, sample_id in zip(tests, sample_ids):
            self.sample_dao.insert(
                BiobankStoredSample(biobankStoredSampleId=sample_id,
                                    biobankId=participant.biobankId,
                                    test=test_code,
                                    confirmed=confirmed_time,
                                    created=created_time))

    def _submit_race_questionnaire_response(self, participant_id,
                                            race_answers):
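        """POST a questionnaire response answering the race question with the given codes."""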
        code_answers = []
        for answer in race_answers:
            _add_code_answer(code_answers, "race", answer)
        qr = make_questionnaire_response_json(participant_id,
                                              self._questionnaire_id,
                                              code_answers=code_answers)
        self.send_post('Participant/%s/QuestionnaireResponse' % participant_id,
                       qr)

    def test_reconciliation_query(self):
        self.setup_codes([RACE_QUESTION_CODE], CodeType.QUESTION)
        self.setup_codes([RACE_AIAN_CODE, RACE_WHITE_CODE], CodeType.ANSWER)
        self._questionnaire_id = self.create_questionnaire(
            'questionnaire3.json')
        # MySQL and Python sub-second rounding differs, so trim micros from generated times.
        order_time = clock.CLOCK.now().replace(microsecond=0)
        old_order_time = order_time - datetime.timedelta(days=10)
        within_24_hours = order_time + datetime.timedelta(hours=23)
        old_within_24_hours = old_order_time + datetime.timedelta(hours=23)
        late_time = order_time + datetime.timedelta(hours=25)
        old_late_time = old_order_time + datetime.timedelta(hours=25)
        file_time = order_time + datetime.timedelta(
            hours=23) + datetime.timedelta(minutes=59)
        two_days_ago = file_time - datetime.timedelta(days=2)

        # On time, recent order and samples; shows up in rx
        p_on_time = self._insert_participant()
        # Extra samples ordered now aren't considered missing or late.
        self._insert_order(p_on_time,
                           'GoodOrder',
                           BIOBANK_TESTS[:4],
                           order_time,
                           finalized_tests=BIOBANK_TESTS[:3],
                           kit_id='kit1',
                           tracking_number='t1')
        self._insert_samples(p_on_time, BIOBANK_TESTS[:2],
                             ['GoodSample1', 'GoodSample2'], within_24_hours,
                             within_24_hours - datetime.timedelta(hours=1))

        # On time order and samples from 10 days ago; shows up in rx
        p_old_on_time = self._insert_participant(race_codes=[RACE_AIAN_CODE])
        # Old missing samples from 10 days ago don't show up in missing or late.
        self._insert_order(p_old_on_time,
                           'OldGoodOrder',
                           BIOBANK_TESTS[:3],
                           old_order_time,
                           kit_id='kit2')
        self._insert_samples(p_old_on_time, BIOBANK_TESTS[:2],
                             ['OldGoodSample1', 'OldGoodSample2'],
                             old_within_24_hours,
                             old_within_24_hours - datetime.timedelta(hours=1))

        # Late, recent order and samples; shows up in rx and late. (But not missing, as it hasn't been
        # 24 hours since the order.)
        p_late_and_missing = self._insert_participant()
        # Extra missing sample doesn't show up as missing as it hasn't been 24 hours yet.
        o_late_and_missing = self._insert_order(p_late_and_missing,
                                                'SlowOrder', BIOBANK_TESTS[:3],
                                                order_time)
        self._insert_samples(p_late_and_missing, [BIOBANK_TESTS[0]],
                             ['LateSample'], late_time,
                             late_time - datetime.timedelta(minutes=59))

        # Late order and samples from 10 days ago; shows up in rx (but not missing, as it was too
        # long ago).
        p_old_late_and_missing = self._insert_participant()
        self._insert_order(p_old_late_and_missing, 'OldSlowOrder',
                           BIOBANK_TESTS[:2], old_order_time)
        self._insert_samples(p_old_late_and_missing, [BIOBANK_TESTS[0]],
                             ['OldLateSample'], old_late_time,
                             old_late_time - datetime.timedelta(minutes=59))

        # Order with missing sample from 2 days ago; shows up in rx and missing.
        p_two_days_missing = self._insert_participant()
        # The third test doesn't wind up in missing, as it was never finalized.
        self._insert_order(p_two_days_missing,
                           'TwoDaysMissingOrder',
                           BIOBANK_TESTS[:3],
                           two_days_ago,
                           finalized_tests=BIOBANK_TESTS[:2])

        # Recent samples with no matching order; shows up in missing.
        p_extra = self._insert_participant(race_codes=[RACE_WHITE_CODE])
        self._insert_samples(p_extra, [BIOBANK_TESTS[-1]],
                             ['NobodyOrderedThisSample'], order_time,
                             order_time - datetime.timedelta(minutes=59))

        # Old samples with no matching order; shows up in rx.
        p_old_extra = self._insert_participant(race_codes=[RACE_AIAN_CODE])
        self._insert_samples(p_old_extra, [BIOBANK_TESTS[-1]],
                             ['OldNobodyOrderedThisSample'], old_order_time,
                             old_order_time - datetime.timedelta(hours=1))

        # Withdrawn participants don't show up in any reports except withdrawal report.

        p_withdrawn_old_on_time = self._insert_participant(
            race_codes=[RACE_AIAN_CODE])
        # This updates the version of the participant and its HPO ID.
        self._insert_order(p_withdrawn_old_on_time, 'OldWithdrawnGoodOrder',
                           BIOBANK_TESTS[:2], old_order_time)
        p_withdrawn_old_on_time = self.participant_dao.get(
            p_withdrawn_old_on_time.participantId)
        self._insert_samples(
            p_withdrawn_old_on_time, BIOBANK_TESTS[:2],
            ['OldWithdrawnGoodSample1', 'OldWithdrawnGoodSample2'],
            old_within_24_hours,
            old_within_24_hours - datetime.timedelta(hours=1))
        self._withdraw(p_withdrawn_old_on_time, within_24_hours)

        p_withdrawn_late_and_missing = self._insert_participant()
        self._insert_order(p_withdrawn_late_and_missing, 'WithdrawnSlowOrder',
                           BIOBANK_TESTS[:2], order_time)
        self._insert_samples(p_withdrawn_late_and_missing, [BIOBANK_TESTS[0]],
                             ['WithdrawnLateSample'], late_time,
                             late_time - datetime.timedelta(minutes=59))
        p_withdrawn_late_and_missing = (self.participant_dao.get(
            p_withdrawn_late_and_missing.participantId))
        self._withdraw(p_withdrawn_late_and_missing, within_24_hours)

        p_withdrawn_old_late_and_missing = self._insert_participant()
        self._insert_order(p_withdrawn_old_late_and_missing,
                           'WithdrawnOldSlowOrder', BIOBANK_TESTS[:2],
                           old_order_time)
        self._insert_samples(p_withdrawn_old_late_and_missing,
                             [BIOBANK_TESTS[0]], ['WithdrawnOldLateSample'],
                             old_late_time,
                             old_late_time - datetime.timedelta(minutes=59))
        p_withdrawn_old_late_and_missing = (self.participant_dao.get(
            p_withdrawn_old_late_and_missing.participantId))
        self._withdraw(p_withdrawn_old_late_and_missing, old_late_time)

        p_withdrawn_extra = self._insert_participant(
            race_codes=[RACE_WHITE_CODE])
        self._insert_samples(p_withdrawn_extra, [BIOBANK_TESTS[-1]],
                             ['WithdrawnNobodyOrderedThisSample'], order_time,
                             order_time - datetime.timedelta(hours=1))
        self._withdraw(p_withdrawn_extra, within_24_hours)

        p_withdrawn_old_extra = self._insert_participant(
            race_codes=[RACE_AIAN_CODE])
        self._insert_samples(p_withdrawn_old_extra, [BIOBANK_TESTS[-1]],
                             ['WithdrawnOldNobodyOrderedThisSample'],
                             old_order_time,
                             old_order_time - datetime.timedelta(hours=1))
        self._withdraw(p_withdrawn_old_extra, within_24_hours)

        p_withdrawn_race_change = self._insert_participant(
            race_codes=[RACE_AIAN_CODE])
        p_withdrawn_race_change_id = to_client_participant_id(
            p_withdrawn_race_change.participantId)
        self._submit_race_questionnaire_response(p_withdrawn_race_change_id,
                                                 [RACE_WHITE_CODE])
        self._withdraw(p_withdrawn_race_change, within_24_hours)

        # For the same participant/test, 3 orders are sent and only 2 samples received. Shows up in
        # both missing (one sample is missing) and late (the two samples that were received arrived
        # after 24 hours).
        p_repeated = self._insert_participant()
        for repetition in xrange(3):
            self._insert_order(
                p_repeated, 'RepeatedOrder%d' % repetition, [BIOBANK_TESTS[0]],
                two_days_ago + datetime.timedelta(hours=repetition))
            if repetition != 2:
                self._insert_samples(
                    p_repeated, [BIOBANK_TESTS[0]],
                    ['RepeatedSample%d' % repetition],
                    within_24_hours + datetime.timedelta(hours=repetition),
                    within_24_hours + datetime.timedelta(hours=repetition - 1))

        received, late, missing, withdrawals = 'rx.csv', 'late.csv', 'missing.csv', 'withdrawals.csv'
        exporter = InMemorySqlExporter(self)
        biobank_samples_pipeline._query_and_write_reports(
            exporter, file_time, received, late, missing, withdrawals)

        exporter.assertFilesEqual((received, late, missing, withdrawals))

        # sent-and-received: 4 on-time, 2 late, none of the missing/extra/repeated ones;
        # includes orders/samples from more than 7 days ago
        exporter.assertRowCount(received, 6)
        exporter.assertColumnNamesEqual(received, _CSV_COLUMN_NAMES)
        row = exporter.assertHasRow(
            received, {
                'biobank_id': to_client_biobank_id(p_on_time.biobankId),
                'sent_test': BIOBANK_TESTS[0],
                'received_test': BIOBANK_TESTS[0]
            })
        # Also check the values of all remaining fields on one row.
        self.assertEquals(row['source_site_name'],
                          'Monroeville Urgent Care Center')
        self.assertEquals(row['source_site_consortium'], 'Pittsburgh')
        self.assertEquals(row['source_site_mayolink_client_number'], '7035769')
        self.assertEquals(row['source_site_hpo'], 'PITT')
        self.assertEquals(row['source_site_hpo_type'], 'HPO')
        self.assertEquals(row['finalized_site_name'],
                          'Monroeville Urgent Care Center')
        self.assertEquals(row['finalized_site_consortium'], 'Pittsburgh')
        self.assertEquals(row['finalized_site_mayolink_client_number'],
                          '7035769')
        self.assertEquals(row['finalized_site_hpo'], 'PITT')
        self.assertEquals(row['finalized_site_hpo_type'], 'HPO')
        self.assertEquals(row['finalized_username'], '*****@*****.**')
        self.assertEquals(row['sent_finalized_time'],
                          database_utils.format_datetime(order_time))
        self.assertEquals(row['sent_collection_time'],
                          database_utils.format_datetime(order_time))
        self.assertEquals(row['sent_processed_time'],
                          database_utils.format_datetime(order_time))
        self.assertEquals(row['received_time'],
                          database_utils.format_datetime(within_24_hours))
        self.assertEquals(
            row['Sample Family Create Date'],
            database_utils.format_datetime(within_24_hours -
                                           datetime.timedelta(hours=1)))
        self.assertEquals(row['sent_count'], '1')
        self.assertEquals(row['received_count'], '1')
        self.assertEquals(row['sent_order_id'], 'OGoodOrder')
        self.assertEquals(row['received_sample_id'], 'GoodSample1')
        self.assertEquals(row['biospecimen_kit_id'], 'kit1')
        self.assertEquals(row['fedex_tracking_number'], 't1')
        # the other sent-and-received rows
        exporter.assertHasRow(
            received, {
                'biobank_id': to_client_biobank_id(p_on_time.biobankId),
                'sent_test': BIOBANK_TESTS[1]
            })
        exporter.assertHasRow(
            received, {
                'biobank_id': to_client_biobank_id(
                    p_late_and_missing.biobankId),
                'sent_test': BIOBANK_TESTS[0]
            })
        exporter.assertHasRow(
            received, {
                'biobank_id': to_client_biobank_id(p_old_on_time.biobankId),
                'sent_test': BIOBANK_TESTS[0]
            })
        exporter.assertHasRow(
            received, {
                'biobank_id': to_client_biobank_id(p_old_on_time.biobankId),
                'sent_test': BIOBANK_TESTS[1]
            })
        exporter.assertHasRow(
            received, {
                'biobank_id':
                to_client_biobank_id(p_old_late_and_missing.biobankId),
                'sent_test':
                BIOBANK_TESTS[0]
            })

        # sent-and-received: 2 late; don't include orders/samples from more than 7 days ago
        exporter.assertRowCount(late, 2)
        exporter.assertColumnNamesEqual(late, _CSV_COLUMN_NAMES)
        exporter.assertHasRow(
            late, {
                'biobank_id': to_client_biobank_id(
                    p_late_and_missing.biobankId),
                'sent_order_id': 'O%s' % o_late_and_missing.biobankOrderId,
                'elapsed_hours': '24'
            })
        exporter.assertHasRow(
            late, {
                'biobank_id': to_client_biobank_id(p_repeated.biobankId),
                'elapsed_hours': '45'
            })

        # orders/samples where something went wrong; don't include orders/samples from more than 7
        # days ago, or where 24 hours hasn't elapsed yet.
        exporter.assertRowCount(missing, 4)
        exporter.assertColumnNamesEqual(missing, _CSV_COLUMN_NAMES)
        # sample received, nothing ordered
        exporter.assertHasRow(
            missing, {
                'biobank_id': to_client_biobank_id(p_extra.biobankId),
                'sent_order_id': ''
            })
        # order received, no sample
        exporter.assertHasRow(
            missing, {
                'biobank_id': to_client_biobank_id(
                    p_two_days_missing.biobankId),
                'sent_order_id': 'OTwoDaysMissingOrder',
                'sent_test': BIOBANK_TESTS[0]
            })
        exporter.assertHasRow(
            missing, {
                'biobank_id': to_client_biobank_id(
                    p_two_days_missing.biobankId),
                'sent_order_id': 'OTwoDaysMissingOrder',
                'sent_test': BIOBANK_TESTS[1]
            })

        # 3 orders sent, only 2 received
        multi_sample_row = exporter.assertHasRow(
            missing, {
                'biobank_id': to_client_biobank_id(p_repeated.biobankId),
                'sent_count': '3',
                'received_count': '2'
            })

        # Also verify the comma-joined fields of the row with multiple orders/samples.
        self.assertItemsEqual(
            multi_sample_row['sent_order_id'].split(','),
            ['ORepeatedOrder1', 'ORepeatedOrder0', 'ORepeatedOrder2'])
        self.assertItemsEqual(
            multi_sample_row['received_sample_id'].split(','),
            ['RepeatedSample0', 'RepeatedSample1'])

        # We don't include the old withdrawal.
        exporter.assertRowCount(withdrawals, 5)
        exporter.assertHasRow(
            withdrawals, {
                'biobank_id':
                to_client_biobank_id(p_withdrawn_old_on_time.biobankId),
                'withdrawal_time':
                database_utils.format_datetime(within_24_hours),
                'is_native_american':
                'Y'
            })
        exporter.assertHasRow(
            withdrawals, {
                'biobank_id':
                to_client_biobank_id(p_withdrawn_late_and_missing.biobankId),
                'withdrawal_time':
                database_utils.format_datetime(within_24_hours),
                'is_native_american':
                'N'
            })
        exporter.assertHasRow(
            withdrawals, {
                'biobank_id': to_client_biobank_id(
                    p_withdrawn_extra.biobankId),
                'withdrawal_time':
                database_utils.format_datetime(within_24_hours),
                'is_native_american': 'N'
            })
        exporter.assertHasRow(
            withdrawals, {
                'biobank_id': to_client_biobank_id(
                    p_withdrawn_old_extra.biobankId),
                'withdrawal_time':
                database_utils.format_datetime(within_24_hours),
                'is_native_american': 'Y'
            })
        exporter.assertHasRow(
            withdrawals, {
                'biobank_id':
                to_client_biobank_id(p_withdrawn_race_change.biobankId),
                'withdrawal_time':
                database_utils.format_datetime(within_24_hours),
                'is_native_american':
                'N'
            })
class GenomicSetFileHandlerTest(CloudStorageSqlTestBase, NdbTestBase):
  def setUp(self):
    super(GenomicSetFileHandlerTest, self).setUp(use_mysql=True)
    NdbTestBase.doSetUp(self)
    TestBase.setup_fake(self)
    # Everything is stored as a list, so override bucket name as a 1-element list.
    config.override_setting(config.GENOMIC_SET_BUCKET_NAME, [_FAKE_BUCKET])
    config.override_setting(config.BIOBANK_SAMPLES_BUCKET_NAME, [_FAKE_BUCKET])
    config.override_setting(config.GENOMIC_BIOBANK_MANIFEST_FOLDER_NAME, [_FAKE_BUCKET_FOLDER])
    self.participant_dao = ParticipantDao()
    self.summary_dao = ParticipantSummaryDao()

  def _write_cloud_csv(self, file_name, contents_str):
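    """Write contents_str (UTF-8 encoded) to file_name in the fake GCS bucket."""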
    with cloudstorage_api.open('/%s/%s' % (_FAKE_BUCKET, file_name), mode='w') as cloud_file:
      cloud_file.write(contents_str.encode('utf-8'))

  def _make_biobank_order(self, **kwargs):
    """Makes a new BiobankOrder (same values every time) with valid/complete defaults.

    Kwargs pass through to BiobankOrder constructor, overriding defaults.
    """
    participant_id = kwargs['participantId']
    modified = datetime.datetime(2019, 3, 25, 15, 59, 30)

    for k, default_value in (
        ('biobankOrderId', u'1'),
        ('created', clock.CLOCK.now()),
        ('sourceSiteId', 1),
        ('sourceUsername', u'*****@*****.**'),
        ('collectedSiteId', 1),
        ('collectedUsername', u'*****@*****.**'),
        ('processedSiteId', 1),
        ('processedUsername', u'*****@*****.**'),
        ('finalizedSiteId', 2),
        ('finalizedUsername', u'*****@*****.**'),
        ('version', 1),
        ('identifiers', [BiobankOrderIdentifier(system=u'a', value=u'c')]),
        ('samples', [BiobankOrderedSample(
            test=u'1SAL2',
            description=u'description',
            processingRequired=True)]),
        ('dvOrders', [BiobankDVOrder(
          participantId=participant_id, modified=modified, version=1)])):
      if k not in kwargs:
        kwargs[k] = default_value
    return BiobankOrder(**kwargs)

  def test_no_file_found(self):
    # If no file is found, no error should be raised.
    self.assertIsNone(genomic_set_file_handler.read_genomic_set_from_bucket())

  def test_read_from_csv_file(self):
    participant = self.participant_dao.insert(Participant(participantId=123, biobankId=1234))
    self.summary_dao.insert(self.participant_summary(participant))
    bo = self._make_biobank_order(participantId=participant.participantId, biobankOrderId='123',
                                  identifiers=[BiobankOrderIdentifier(
                                    system=u'https://www.pmi-ops.org', value=u'12345678')])
    BiobankOrderDao().insert(bo)

    participant2 = self.participant_dao.insert(Participant(participantId=124, biobankId=1235))
    self.summary_dao.insert(self.participant_summary(participant2))
    bo2 = self._make_biobank_order(participantId=participant2.participantId, biobankOrderId='124',
                                   identifiers=[BiobankOrderIdentifier(
                                     system=u'https://www.pmi-ops.org', value=u'12345679')])
    BiobankOrderDao().insert(bo2)

    participant3 = self.participant_dao.insert(Participant(participantId=125, biobankId=1236))
    self.summary_dao.insert(self.participant_summary(participant3))
    bo3 = self._make_biobank_order(participantId=participant3.participantId, biobankOrderId='125',
                                   identifiers=[BiobankOrderIdentifier(
                                     system=u'https://www.pmi-ops.org', value=u'12345680')])
    BiobankOrderDao().insert(bo3)

    samples_file = test_data.open_genomic_set_file('Genomic-Test-Set-test-1.csv')

    input_filename = 'cloud%s.csv' % self._naive_utc_to_naive_central(clock.CLOCK.now()).strftime(
        genomic_set_file_handler.INPUT_CSV_TIME_FORMAT)

    self._write_cloud_csv(input_filename, samples_file)
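    # Reading from the bucket should ingest the CSV just written and create a genomic set with its members.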
    genomic_set_file_handler.read_genomic_set_from_bucket()
    set_dao = GenomicSetDao()
    obj = set_dao.get_all()[0]

    self.assertEqual(obj.genomicSetName, 'name_xxx')
    self.assertEqual(obj.genomicSetCriteria, 'criteria_xxx')
    self.assertEqual(obj.genomicSetVersion, 1)

    member_dao = GenomicSetMemberDao()
    items = member_dao.get_all()
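    # Each member row should reference one of the three participants and biobank orders inserted above.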
    for item in items:
      self.assertIn(item.participantId, [123, 124, 125])
      self.assertIn(item.biobankOrderId, ['123', '124', '125'])
      self.assertIn(item.biobankId, ['1234', '1235', '1236'])
      self.assertIn(item.biobankOrderClientId, ['12345678', '12345679', '12345680'])
      self.assertEqual(item.genomicSetId, 1)
      self.assertIn(item.genomeType, ['aou_wgs', 'aou_array'])
      self.assertIn(item.nyFlag, [0, 1])
      self.assertIn(item.sexAtBirth, ['F', 'M'])

  def test_create_genomic_set_result_file(self):
    participant = self.participant_dao.insert(Participant(participantId=123, biobankId=123))
    self.summary_dao.insert(self.participant_summary(participant))
    bo = self._make_biobank_order(participantId=participant.participantId, biobankOrderId='123',
                                  identifiers=[BiobankOrderIdentifier(
                                    system=u'https://www.pmi-ops.org', value=u'12345678')])
    BiobankOrderDao().insert(bo)

    participant2 = self.participant_dao.insert(Participant(participantId=124, biobankId=124))
    self.summary_dao.insert(self.participant_summary(participant2))
    bo2 = self._make_biobank_order(participantId=participant2.participantId, biobankOrderId='124',
                                   identifiers=[BiobankOrderIdentifier(
                                     system=u'https://www.pmi-ops.org', value=u'12345679')])
    BiobankOrderDao().insert(bo2)

    participant3 = self.participant_dao.insert(Participant(participantId=125, biobankId=125))
    self.summary_dao.insert(self.participant_summary(participant3))
    bo3 = self._make_biobank_order(participantId=participant3.participantId, biobankOrderId='125',
                                   identifiers=[BiobankOrderIdentifier(
                                     system=u'https://www.pmi-ops.org', value=u'12345680')])
    BiobankOrderDao().insert(bo3)

    genomic_set = self._create_fake_genomic_set('fake_genomic_set_name',
                                                'fake_genomic_set_criteria',
                                                'Genomic-Test-Set-v12019-04-05-00-30-10.CSV')
    self._create_fake_genomic_member(genomic_set.id, participant.participantId, bo.biobankOrderId,
                                     participant.biobankId, bo.identifiers[0].value,
                                     validation_status=GenomicValidationStatus.VALID,
                                     sex_at_birth='F', genome_type='aou_array', ny_flag='Y')

    self._create_fake_genomic_member(genomic_set.id, participant2.participantId, bo2.biobankOrderId,
                                     participant2.biobankId, bo2.identifiers[0].value,
                                     validation_status=GenomicValidationStatus.INVALID_AGE,
                                     sex_at_birth='M', genome_type='aou_array', ny_flag='N')

    self._create_fake_genomic_member(genomic_set.id, participant3.participantId, bo3.biobankOrderId,
                                     participant3.biobankId, bo3.identifiers[0].value,
                                     validation_status=GenomicValidationStatus.INVALID_CONSENT,
                                     sex_at_birth='F', genome_type='aou_wgs', ny_flag='Y')

    genomic_set_file_handler.create_genomic_set_status_result_file(genomic_set.id)
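    # The handler should write a validation result CSV back to the genomic set bucket, named after the
    # original set file with a '-Validation-Result' suffix.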

    expected_result_filename = 'Genomic-Test-Set-v12019-04-05-00-30-10-Validation-Result.CSV'
    bucket_name = config.getSetting(config.GENOMIC_SET_BUCKET_NAME)
    path = '/' + bucket_name + '/' + expected_result_filename
    csv_file = cloudstorage_api.open(path)
    csv_reader = csv.DictReader(csv_file, delimiter=',')

    class ResultCsvColumns(object):
      """Names of CSV columns expected in the genomic set validation result file."""
      GENOMIC_SET_NAME = 'genomic_set_name'
      GENOMIC_SET_CRITERIA = 'genomic_set_criteria'
      PID = 'pid'
      BIOBANK_ORDER_ID = 'biobank_order_id'
      NY_FLAG = 'ny_flag'
      SEX_AT_BIRTH = 'sex_at_birth'
      GENOME_TYPE = 'genome_type'
      STATUS = 'status'
      INVALID_REASON = 'invalid_reason'

      ALL = (GENOMIC_SET_NAME, GENOMIC_SET_CRITERIA, PID, BIOBANK_ORDER_ID, NY_FLAG, SEX_AT_BIRTH,
             GENOME_TYPE, STATUS, INVALID_REASON)

    missing_cols = set(ResultCsvColumns.ALL) - set(csv_reader.fieldnames)
    self.assertEqual(len(missing_cols), 0)
    rows = list(csv_reader)
    self.assertEqual(len(rows), 3)
    self.assertEqual(rows[0][ResultCsvColumns.GENOMIC_SET_NAME], 'fake_genomic_set_name')
    self.assertEqual(rows[0][ResultCsvColumns.GENOMIC_SET_CRITERIA], 'fake_genomic_set_criteria')
    self.assertEqual(rows[0][ResultCsvColumns.STATUS], 'valid')
    self.assertEqual(rows[0][ResultCsvColumns.INVALID_REASON], '')
    self.assertEqual(rows[0][ResultCsvColumns.PID], '123')
    self.assertEqual(rows[0][ResultCsvColumns.BIOBANK_ORDER_ID], '123')
    self.assertEqual(rows[0][ResultCsvColumns.NY_FLAG], 'Y')
    self.assertEqual(rows[0][ResultCsvColumns.GENOME_TYPE], 'aou_array')
    self.assertEqual(rows[0][ResultCsvColumns.SEX_AT_BIRTH], 'F')

    self.assertEqual(rows[1][ResultCsvColumns.GENOMIC_SET_NAME], 'fake_genomic_set_name')
    self.assertEqual(rows[1][ResultCsvColumns.GENOMIC_SET_CRITERIA], 'fake_genomic_set_criteria')
    self.assertEqual(rows[1][ResultCsvColumns.STATUS], 'invalid')
    self.assertEqual(rows[1][ResultCsvColumns.INVALID_REASON], 'INVALID_AGE')
    self.assertEqual(rows[1][ResultCsvColumns.PID], '124')
    self.assertEqual(rows[1][ResultCsvColumns.BIOBANK_ORDER_ID], '124')
    self.assertEqual(rows[1][ResultCsvColumns.NY_FLAG], 'N')
    self.assertEqual(rows[1][ResultCsvColumns.GENOME_TYPE], 'aou_array')
    self.assertEqual(rows[1][ResultCsvColumns.SEX_AT_BIRTH], 'M')

    self.assertEqual(rows[2][ResultCsvColumns.GENOMIC_SET_NAME], 'fake_genomic_set_name')
    self.assertEqual(rows[2][ResultCsvColumns.GENOMIC_SET_CRITERIA], 'fake_genomic_set_criteria')
    self.assertEqual(rows[2][ResultCsvColumns.STATUS], 'invalid')
    self.assertEqual(rows[2][ResultCsvColumns.INVALID_REASON], 'INVALID_CONSENT')
    self.assertEqual(rows[2][ResultCsvColumns.PID], '125')
    self.assertEqual(rows[2][ResultCsvColumns.BIOBANK_ORDER_ID], '125')
    self.assertEqual(rows[2][ResultCsvColumns.NY_FLAG], 'Y')
    self.assertEqual(rows[2][ResultCsvColumns.GENOME_TYPE], 'aou_wgs')
    self.assertEqual(rows[2][ResultCsvColumns.SEX_AT_BIRTH], 'F')

  def test_create_and_upload_biobank_manifest_file(self):
    participant = self.participant_dao.insert(Participant(participantId=123, biobankId=123))
    self.summary_dao.insert(self.participant_summary(participant))
    bo = self._make_biobank_order(participantId=participant.participantId, biobankOrderId='123',
                                  identifiers=[BiobankOrderIdentifier(
                                    system=u'https://www.pmi-ops.org', value=u'12345678')])
    BiobankOrderDao().insert(bo)

    participant2 = self.participant_dao.insert(Participant(participantId=124, biobankId=124))
    self.summary_dao.insert(self.participant_summary(participant2))
    bo2 = self._make_biobank_order(participantId=participant2.participantId, biobankOrderId='124',
                                   identifiers=[BiobankOrderIdentifier(
                                     system=u'https://www.pmi-ops.org', value=u'12345679')])
    BiobankOrderDao().insert(bo2)

    participant3 = self.participant_dao.insert(Participant(participantId=125, biobankId=125))
    self.summary_dao.insert(self.participant_summary(participant3))
    bo3 = self._make_biobank_order(participantId=participant3.participantId, biobankOrderId='125',
                                   identifiers=[BiobankOrderIdentifier(
                                     system=u'https://www.pmi-ops.org', value=u'12345680')])
    BiobankOrderDao().insert(bo3)

    genomic_set = self._create_fake_genomic_set('fake_genomic_set_name',
                                                'fake_genomic_set_criteria',
                                                'Genomic-Test-Set-v12019-04-05-00-30-10.CSV')
    self._create_fake_genomic_member(genomic_set.id, participant.participantId, bo.biobankOrderId,
                                     participant.biobankId, bo.identifiers[0].value,
                                     validation_status=GenomicValidationStatus.VALID,
                                     sex_at_birth='F', genome_type='aou_array', ny_flag='Y')

    self._create_fake_genomic_member(genomic_set.id, participant2.participantId, bo2.biobankOrderId,
                                     participant2.biobankId, bo2.identifiers[0].value,
                                     validation_status=GenomicValidationStatus.INVALID_AGE,
                                     sex_at_birth='M', genome_type='aou_array', ny_flag='N')

    self._create_fake_genomic_member(genomic_set.id, participant3.participantId, bo3.biobankOrderId,
                                     participant3.biobankId, bo3.identifiers[0].value,
                                     validation_status=GenomicValidationStatus.INVALID_CONSENT,
                                     sex_at_birth='F', genome_type='aou_wgs', ny_flag='Y')

    now = clock.CLOCK.now()
    genomic_biobank_menifest_handler\
      .create_and_upload_genomic_biobank_manifest_file(genomic_set.id, now)
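    # The manifest should land in the biobank samples bucket; the 'rdr_fake_sub_folder' prefix checked
    # below presumably comes from the GENOMIC_BIOBANK_MANIFEST_FOLDER_NAME override in setUp.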

    bucket_name = config.getSetting(config.BIOBANK_SAMPLES_BUCKET_NAME)
    # Convert UTC to naive US/Central so the timestamp matches the one embedded in the manifest file name.
    now_cdt_str = _UTC.localize(now).astimezone(_US_CENTRAL).replace(tzinfo=None) \
      .strftime(_OUTPUT_CSV_TIME_FORMAT)

    class ExpectedCsvColumns(object):
      VALUE = 'value'
      BIOBANK_ID = 'biobank_id'
      SEX_AT_BIRTH = 'sex_at_birth'
      GENOME_TYPE = 'genome_type'
      NY_FLAG = 'ny_flag'
      REQUEST_ID = 'request_id'
      PACKAGE_ID = 'package_id'

      ALL = (VALUE, SEX_AT_BIRTH, GENOME_TYPE, NY_FLAG, REQUEST_ID, PACKAGE_ID)

    expected_result_filename = 'rdr_fake_sub_folder/Genomic-Manifest-AoU-1-v1' + \
                               now_cdt_str + '.CSV'
    path = '/' + bucket_name + '/' + expected_result_filename
    csv_file = cloudstorage_api.open(path)
    csv_reader = csv.DictReader(csv_file, delimiter=',')

    missing_cols = set(ExpectedCsvColumns.ALL) - set(csv_reader.fieldnames)
    self.assertEqual(len(missing_cols), 0)
    rows = list(csv_reader)
    self.assertEqual(rows[0][ExpectedCsvColumns.VALUE], '12345678')
    self.assertEqual(rows[0][ExpectedCsvColumns.BIOBANK_ID], '123')
    self.assertEqual(rows[0][ExpectedCsvColumns.SEX_AT_BIRTH], 'F')
    self.assertEqual(rows[0][ExpectedCsvColumns.GENOME_TYPE], 'aou_array')
    self.assertEqual(rows[0][ExpectedCsvColumns.NY_FLAG], 'Y')
    self.assertEqual(rows[1][ExpectedCsvColumns.VALUE], '12345679')
    self.assertEqual(rows[1][ExpectedCsvColumns.BIOBANK_ID], '124')
    self.assertEqual(rows[1][ExpectedCsvColumns.SEX_AT_BIRTH], 'M')
    self.assertEqual(rows[1][ExpectedCsvColumns.GENOME_TYPE], 'aou_array')
    self.assertEqual(rows[1][ExpectedCsvColumns.NY_FLAG], 'N')
    self.assertEqual(rows[2][ExpectedCsvColumns.VALUE], '12345680')
    self.assertEqual(rows[2][ExpectedCsvColumns.BIOBANK_ID], '125')
    self.assertEqual(rows[2][ExpectedCsvColumns.SEX_AT_BIRTH], 'F')
    self.assertEqual(rows[2][ExpectedCsvColumns.GENOME_TYPE], 'aou_wgs')
    self.assertEqual(rows[2][ExpectedCsvColumns.NY_FLAG], 'Y')

  def _create_fake_genomic_set(self, genomic_set_name, genomic_set_criteria, genomic_set_filename):
    now = clock.CLOCK.now()
    genomic_set = GenomicSet()
    genomic_set.genomicSetName = genomic_set_name
    genomic_set.genomicSetCriteria = genomic_set_criteria
    genomic_set.genomicSetFile = genomic_set_filename
    genomic_set.genomicSetFileTime = now
    genomic_set.genomicSetStatus = GenomicSetStatus.INVALID

    set_dao = GenomicSetDao()
    genomic_set.genomicSetVersion = set_dao.get_new_version_number(genomic_set.genomicSetName)
    genomic_set.created = now
    genomic_set.modified = now

    set_dao.insert(genomic_set)

    return genomic_set

  def _create_fake_genomic_member(self, genomic_set_id, participant_id, biobank_order_id,
                                  biobank_id, biobank_order_client_id,
                                  validation_status=GenomicValidationStatus.VALID,
                                  sex_at_birth='F', genome_type='aou_array', ny_flag='Y'):
    now = clock.CLOCK.now()
    genomic_set_member = GenomicSetMember()
    genomic_set_member.genomicSetId = genomic_set_id
    genomic_set_member.created = now
    genomic_set_member.modified = now
    genomic_set_member.validationStatus = validation_status
    genomic_set_member.participantId = participant_id
    genomic_set_member.sexAtBirth = sex_at_birth
    genomic_set_member.genomeType = genome_type
    genomic_set_member.nyFlag = 1 if ny_flag == 'Y' else 0
    genomic_set_member.biobankOrderId = biobank_order_id
    genomic_set_member.biobankId = biobank_id
    genomic_set_member.biobankOrderClientId = biobank_order_client_id

    member_dao = GenomicSetMemberDao()
    member_dao.insert(genomic_set_member)

  def _naive_utc_to_naive_central(self, naive_utc_date):
    utc_date = pytz.utc.localize(naive_utc_date)
    central_date = utc_date.astimezone(pytz.timezone('US/Central'))
    return central_date.replace(tzinfo=None)
Example #12
class OrganizationDaoTest(SqlTestBase):

  def setUp(self):
    super(OrganizationDaoTest, self).setUp()
    self.organization_dao = OrganizationDao()
    self.participant_dao = ParticipantDao()
    self.ps_dao = ParticipantSummaryDao()
    self.ps_history = ParticipantHistoryDao()

  def test_insert(self):
    organization = Organization(externalId='myorg', displayName='myorg_display',
                                hpoId=PITT_HPO_ID, isObsolete=1)
    created_organization = self.organization_dao.insert(organization)
    new_organization = self.organization_dao.get(created_organization.organizationId)
    organization.organizationId = created_organization.organizationId
    organization.isObsolete = new_organization.isObsolete
    self.assertEquals(organization.asdict(), new_organization.asdict())

  def test_participant_pairing_updates_onchange(self):
    provider_link = '[{"organization": {"reference": "Organization/AZ_TUCSON"}, "primary": true}]'
    TIME = datetime.datetime(2018, 1, 1)
    TIME2 = datetime.datetime(2018, 1, 2)
    insert_org = self.organization_dao.insert(
      Organization(externalId='tardis', displayName='bluebox', hpoId=PITT_HPO_ID))

    with FakeClock(TIME):
      self.participant_dao.insert(Participant(participantId=1, biobankId=2))
      participant = self.participant_dao.get(1)
      participant.organizationId = insert_org.organizationId
      self.participant_dao.update(participant)

      self.assertEquals(participant.hpoId, insert_org.hpoId)
      participant = self.participant_dao.get(1)
      p_summary = self.ps_dao.insert(self.participant_summary(participant))

    with FakeClock(TIME2):
      insert_org.hpoId = AZ_HPO_ID
      self.organization_dao.update(insert_org)
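    # Moving the organization to a new HPO should cascade to the already-paired participant, its summary,
    # and its history row.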

    new_org = self.organization_dao.get_by_external_id('tardis')
    ps = self.ps_dao.get(p_summary.participantId)
    ph = self.ps_history.get([participant.participantId, 2])
    participant = self.participant_dao.get(1)

    self.assertEquals(ps.lastModified, TIME2)
    self.assertEquals(ps.hpoId, new_org.hpoId)
    self.assertEquals(ph.hpoId, insert_org.hpoId)
    self.assertEquals(ph.organizationId, insert_org.organizationId)
    self.assertEquals(new_org.hpoId, participant.hpoId)
    self.assertEquals(new_org.organizationId, participant.organizationId)
    self.assertIsNone(participant.siteId)
    self.assertEquals(participant.providerLink, provider_link)

  def test_participant_different_hpo_does_not_change(self):
    insert_org = self.organization_dao.insert(
      Organization(externalId='stark_industries', displayName='ironman', hpoId=PITT_HPO_ID))

    self.participant_dao.insert(Participant(participantId=1, biobankId=2))
    participant = self.participant_dao.get(1)
    participant.hpoId = UNSET_HPO_ID
    self.participant_dao.update(participant)
    insert_org.hpoId = AZ_HPO_ID
    self.organization_dao.update(insert_org)
    new_org = self.organization_dao.get_by_external_id('stark_industries')
    participant = self.participant_dao.get(1)
    self.assertNotEqual(new_org.hpoId, participant.hpoId)
    self.assertEqual(new_org.hpoId, AZ_HPO_ID)
    self.assertEqual(participant.hpoId, UNSET_HPO_ID)

class EhrReceiptDaoTest(SqlTestBase):
    def setUp(self, with_data=True, use_mysql=True):
        super(EhrReceiptDaoTest, self).setUp(with_data=with_data,
                                             use_mysql=use_mysql)
        self.setup_fake()
        self.calendar_dao = CalendarDao()
        self.org_dao = OrganizationDao()
        self.hpo_dao = HPODao()
        self.participant_dao = ParticipantDao()
        self.summary_dao = ParticipantSummaryDao()
        self.ehr_receipt_dao = EhrReceiptDao()
        self._setup_initial_data()

    @staticmethod
    def _iter_dates_in_range(start, end):
        current = start
        while current <= end:
            yield current
            current += datetime.timedelta(days=1)

    def _fill_calendar_range(self, start, end):
        for date in self._iter_dates_in_range(start, end):
            self.calendar_dao.insert(Calendar(day=date))

    def _make_hpo(self, int_id, string_id):
        hpo = HPO(hpoId=int_id, name=string_id)
        self.hpo_dao.insert(hpo)
        return hpo

    def _make_org(self, **kwargs):
        org = Organization(**kwargs)
        self.org_dao.insert(org)
        return org

    def _make_participant(self, org, int_id):
        participant = self._participant_with_defaults(participantId=int_id,
                                                      biobankId=int_id)
        participant.hpoId = org.hpoId
        participant.organizationId = org.organizationId
        self.participant_dao.insert(participant)
        summary = self.participant_summary(participant)
        summary.hpoId = participant.hpoId
        summary.organizationId = participant.organizationId
        self.summary_dao.insert(summary)
        return participant, summary

    def _update_ehr(self, participant_summary, update_time):
        self.summary_dao.update_ehr_status(participant_summary, update_time)
        self.summary_dao.update(participant_summary)

    def _save_ehr_receipt(self, org, receipt_time):
        receipt = EhrReceipt(organizationId=org.organizationId,
                             receiptTime=receipt_time)
        self.ehr_receipt_dao.insert(receipt)

    def _setup_initial_data(self):
        self.hpo_foo = self._make_hpo(int_id=10, string_id='hpo_foo')
        self.hpo_bar = self._make_hpo(int_id=11, string_id='hpo_bar')

        self.org_foo_a = self._make_org(organizationId=10,
                                        externalId='FOO_A',
                                        displayName='Foo A',
                                        hpoId=self.hpo_foo.hpoId)
        self.org_bar_a = self._make_org(organizationId=11,
                                        externalId='BAR_A',
                                        displayName='Bar A',
                                        hpoId=self.hpo_bar.hpoId)

        participant_and_summary_pairs = [
            self._make_participant(org=self.org_foo_a, int_id=11),
            self._make_participant(org=self.org_foo_a, int_id=12),
            self._make_participant(org=self.org_bar_a, int_id=13),
            self._make_participant(org=self.org_bar_a, int_id=14),
        ]
        self.participants = {
            participant.participantId: participant
            for participant, summary in participant_and_summary_pairs
        }
        self.summaries = {
            participant.participantId: summary
            for participant, summary in participant_and_summary_pairs
        }

    def test_get_active_organization_counts_in_interval_day(self):
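        # Two organizations submit receipts on Feb 2 and one on Feb 4; expect the distinct active-org
        # count per day. The week/month/quarter tests below follow the same pattern at coarser intervals.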
        self._fill_calendar_range(datetime.date(2019, 1, 1),
                                  datetime.date(2019, 3, 1))

        self._save_ehr_receipt(org=self.org_foo_a,
                               receipt_time=datetime.datetime(2019, 2, 2))
        self._save_ehr_receipt(org=self.org_bar_a,
                               receipt_time=datetime.datetime(2019, 2, 2))
        self._save_ehr_receipt(org=self.org_foo_a,
                               receipt_time=datetime.datetime(2019, 2, 4))

        results = self.ehr_receipt_dao.get_active_organization_counts_in_interval(
            start_date=datetime.datetime(2019, 2, 1),
            end_date=datetime.datetime(2019, 2, 7),
            interval=INTERVAL_DAY)

        self.assertEqual([(r['start_date'], r['active_organization_count'])
                          for r in results], [
                              (datetime.date(2019, 2, 1), 0L),
                              (datetime.date(2019, 2, 2), 2L),
                              (datetime.date(2019, 2, 3), 0L),
                              (datetime.date(2019, 2, 4), 1L),
                              (datetime.date(2019, 2, 5), 0L),
                              (datetime.date(2019, 2, 6), 0L),
                              (datetime.date(2019, 2, 7), 0L),
                          ])

    def test_get_active_organization_counts_in_interval_week(self):
        self._fill_calendar_range(datetime.date(2019, 1, 1),
                                  datetime.date(2019, 3, 1))

        self._save_ehr_receipt(org=self.org_foo_a,
                               receipt_time=datetime.datetime(2019, 2, 4))
        self._save_ehr_receipt(org=self.org_bar_a,
                               receipt_time=datetime.datetime(2019, 2, 4))
        self._save_ehr_receipt(org=self.org_foo_a,
                               receipt_time=datetime.datetime(2019, 2, 18))

        results = self.ehr_receipt_dao.get_active_organization_counts_in_interval(
            start_date=datetime.datetime(2019, 2, 1),
            end_date=datetime.datetime(2019, 3, 1),
            interval=INTERVAL_WEEK)

        self.assertEqual([(r['start_date'], r['active_organization_count'])
                          for r in results], [
                              (datetime.date(2019, 1, 27), 0L),
                              (datetime.date(2019, 2, 3), 2L),
                              (datetime.date(2019, 2, 10), 0L),
                              (datetime.date(2019, 2, 17), 1L),
                              (datetime.date(2019, 2, 24), 0L),
                          ])

    def test_get_active_organization_counts_in_interval_month(self):
        self._fill_calendar_range(datetime.date(2018, 12, 1),
                                  datetime.date(2019, 7, 1))

        self._save_ehr_receipt(org=self.org_foo_a,
                               receipt_time=datetime.datetime(2019, 2, 1))
        self._save_ehr_receipt(org=self.org_bar_a,
                               receipt_time=datetime.datetime(2019, 2, 1))
        self._save_ehr_receipt(org=self.org_foo_a,
                               receipt_time=datetime.datetime(2019, 4, 1))

        results = self.ehr_receipt_dao.get_active_organization_counts_in_interval(
            start_date=datetime.datetime(2019, 1, 1),
            end_date=datetime.datetime(2019, 5, 1),
            interval=INTERVAL_MONTH)

        self.assertEqual([(r['start_date'], r['active_organization_count'])
                          for r in results], [
                              (datetime.date(2019, 1, 1), 0L),
                              (datetime.date(2019, 2, 1), 2L),
                              (datetime.date(2019, 3, 1), 0L),
                              (datetime.date(2019, 4, 1), 1L),
                              (datetime.date(2019, 5, 1), 0L),
                          ])

    def test_get_active_organization_counts_in_interval_quarter(self):
        self._fill_calendar_range(datetime.date(2018, 12, 1),
                                  datetime.date(2020, 1, 1))

        self._save_ehr_receipt(org=self.org_foo_a,
                               receipt_time=datetime.datetime(2019, 5, 1))
        self._save_ehr_receipt(org=self.org_bar_a,
                               receipt_time=datetime.datetime(2019, 5, 1))
        self._save_ehr_receipt(org=self.org_foo_a,
                               receipt_time=datetime.datetime(2019, 11, 1))

        results = self.ehr_receipt_dao.get_active_organization_counts_in_interval(
            start_date=datetime.datetime(2019, 1, 1),
            end_date=datetime.datetime(2020, 1, 1),
            interval=INTERVAL_QUARTER)

        self.assertEqual([(r['start_date'], r['active_organization_count'])
                          for r in results], [
                              (datetime.date(2019, 1, 1), 0L),
                              (datetime.date(2019, 4, 1), 2L),
                              (datetime.date(2019, 7, 1), 0L),
                              (datetime.date(2019, 10, 1), 1L),
                              (datetime.date(2020, 1, 1), 0L),
                          ])
Example #14
class BiobankOrderApiTest(FlaskTestBase):
    def setUp(self):
        super(BiobankOrderApiTest, self).setUp()
        self.participant = Participant(participantId=123, biobankId=555)
        self.participant_dao = ParticipantDao()
        self.participant_dao.insert(self.participant)
        self.summary_dao = ParticipantSummaryDao()
        self.path = ('Participant/%s/BiobankOrder' %
                     to_client_participant_id(self.participant.participantId))

    def test_insert_and_refetch(self):
        self.summary_dao.insert(self.participant_summary(self.participant))
        self.create_and_verify_created_obj(
            self.path, load_biobank_order_json(self.participant.participantId))

    def test_insert_new_order(self):
        self.summary_dao.insert(self.participant_summary(self.participant))
        order_json = load_biobank_order_json(self.participant.participantId,
                                             filename='biobank_order_2.json')
        result = self.send_post(self.path, order_json)
        full_order_json = load_biobank_order_json(
            self.participant.participantId, filename='biobank_order_1.json')
        _strip_fields(result)
        _strip_fields(full_order_json)
        self.assertEquals(full_order_json, result)

    def test_error_no_summary(self):
        order_json = load_biobank_order_json(self.participant.participantId)
        self.send_post(self.path,
                       order_json,
                       expected_status=httplib.BAD_REQUEST)

    def test_error_missing_required_fields(self):
        order_json = load_biobank_order_json(self.participant.participantId)
        del order_json['identifier']
        self.send_post(self.path,
                       order_json,
                       expected_status=httplib.BAD_REQUEST)

    def test_no_duplicate_test_within_order(self):
        order_json = load_biobank_order_json(self.participant.participantId)
        order_json['samples'].extend(list(order_json['samples']))
        self.send_post(self.path,
                       order_json,
                       expected_status=httplib.BAD_REQUEST)

    def test_auto_pair_updates_participant_and_summary(self):
        self.summary_dao.insert(self.participant_summary(self.participant))

        # Sanity check: No HPO yet.
        p_unpaired = self.participant_dao.get(self.participant.participantId)
        self.assertEquals(p_unpaired.hpoId, UNSET_HPO_ID)
        self.assertIsNone(p_unpaired.providerLink)
        s_unpaired = self.summary_dao.get(self.participant.participantId)
        self.assertEquals(s_unpaired.hpoId, UNSET_HPO_ID)

        self.send_post(self.path,
                       load_biobank_order_json(self.participant.participantId))

        # Some HPO has been set. (ParticipantDao tests cover more detailed cases / specific values.)
        p_paired = self.participant_dao.get(self.participant.participantId)
        self.assertNotEqual(p_paired.hpoId, UNSET_HPO_ID)
        self.assertIsNotNone(p_paired.providerLink)
        s_paired = self.summary_dao.get(self.participant.participantId)
        self.assertNotEqual(s_paired.hpoId, UNSET_HPO_ID)
Example #15
class ParticipantSummaryDaoTest(NdbTestBase):
    def setUp(self):
        super(ParticipantSummaryDaoTest, self).setUp()
        self.dao = ParticipantSummaryDao()
        self.participant_dao = ParticipantDao()
        self.no_filter_query = Query([], None, 2, None)
        self.one_filter_query = Query(
            [FieldFilter("participantId", Operator.EQUALS, 1)], None, 2, None)
        self.two_filter_query = Query([
            FieldFilter("participantId", Operator.EQUALS, 1),
            FieldFilter("hpoId", Operator.EQUALS, PITT_HPO_ID)
        ], None, 2, None)
        self.ascending_biobank_id_query = Query([], OrderBy("biobankId", True),
                                                2, None)
        self.descending_biobank_id_query = Query([],
                                                 OrderBy("biobankId",
                                                         False), 2, None)
        self.enrollment_status_order_query = Query([],
                                                   OrderBy(
                                                       "enrollmentStatus",
                                                       True), 2, None)
        self.hpo_id_order_query = Query([], OrderBy("hpoId", True), 2, None)
        self.first_name_order_query = Query([], OrderBy("firstName", True), 2,
                                            None)

    def assert_no_results(self, query):
        results = self.dao.query(query)
        self.assertEquals([], results.items)
        self.assertIsNone(results.pagination_token)

    def assert_results(self, query, items, pagination_token=None):
        results = self.dao.query(query)
        self.assertListAsDictEquals(items, results.items)
        self.assertEquals(
            pagination_token, results.pagination_token,
            "Pagination tokens don't match; decoded = %s, %s" %
            (_decode_token(pagination_token),
             _decode_token(results.pagination_token)))

    def testQuery_noSummaries(self):
        self.assert_no_results(self.no_filter_query)
        self.assert_no_results(self.one_filter_query)
        self.assert_no_results(self.two_filter_query)
        self.assert_no_results(self.ascending_biobank_id_query)
        self.assert_no_results(self.descending_biobank_id_query)

    def _insert(self, participant, first_name=None, last_name=None):
        self.participant_dao.insert(participant)
        summary = self.participant_summary(participant)
        if first_name:
            summary.firstName = first_name
        if last_name:
            summary.lastName = last_name
        self.dao.insert(summary)
        return participant

    def testQuery_oneSummary(self):
        participant = Participant(participantId=1, biobankId=2)
        self._insert(participant)
        summary = self.dao.get(1)
        self.assert_results(self.no_filter_query, [summary])
        self.assert_results(self.one_filter_query, [summary])
        self.assert_no_results(self.two_filter_query)
        self.assert_results(self.ascending_biobank_id_query, [summary])
        self.assert_results(self.descending_biobank_id_query, [summary])

    def testUnicodeNameRoundTrip(self):
        name = self.fake.first_name()
        with self.assertRaises(UnicodeEncodeError):
            str(name)  # sanity check that the name contains non-ASCII
        participant = self._insert(Participant(participantId=1, biobankId=2))
        summary = self.dao.get(participant.participantId)
        summary.firstName = name
        self.dao.update(summary)
        fetched_summary = self.dao.get(participant.participantId)
        self.assertEquals(name, fetched_summary.firstName)

    def testQuery_twoSummaries(self):
        participant_1 = Participant(participantId=1, biobankId=2)
        self._insert(participant_1, 'Alice', 'Smith')
        participant_2 = Participant(participantId=2, biobankId=1)
        self._insert(participant_2, 'Zed', 'Zebra')
        ps_1 = self.dao.get(1)
        ps_2 = self.dao.get(2)
        self.assert_results(self.no_filter_query, [ps_1, ps_2])
        self.assert_results(self.one_filter_query, [ps_1])
        self.assert_no_results(self.two_filter_query)
        self.assert_results(self.ascending_biobank_id_query, [ps_2, ps_1])
        self.assert_results(self.descending_biobank_id_query, [ps_1, ps_2])

    def testQuery_threeSummaries_paginate(self):
        participant_1 = Participant(participantId=1, biobankId=4)
        self._insert(participant_1, 'Alice', 'Aardvark')
        participant_2 = Participant(participantId=2, biobankId=1)
        self._insert(participant_2, 'Bob', 'Builder')
        participant_3 = Participant(participantId=3, biobankId=3)
        self._insert(participant_3, 'Chad', 'Caterpillar')
        ps_1 = self.dao.get(1)
        ps_2 = self.dao.get(2)
        ps_3 = self.dao.get(3)
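        # The pagination tokens below appear to encode the sort-key values of the last row returned
        # (ordering fields, last name, first name, date of birth, participant ID).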
        self.assert_results(
            self.no_filter_query, [ps_1, ps_2],
            _make_pagination_token(['Builder', 'Bob', None, 2]))
        self.assert_results(self.one_filter_query, [ps_1])
        self.assert_no_results(self.two_filter_query)
        self.assert_results(
            self.ascending_biobank_id_query, [ps_2, ps_3],
            _make_pagination_token([3, 'Caterpillar', 'Chad', None, 3]))
        self.assert_results(
            self.descending_biobank_id_query, [ps_1, ps_3],
            _make_pagination_token([3, 'Caterpillar', 'Chad', None, 3]))

        self.assert_results(
            _with_token(self.no_filter_query,
                        _make_pagination_token(['Builder', 'Bob', None, 2])),
            [ps_3])
        self.assert_results(
            _with_token(
                self.ascending_biobank_id_query,
                _make_pagination_token([3, 'Caterpillar', 'Chad', None, 3])),
            [ps_1])
        self.assert_results(
            _with_token(
                self.descending_biobank_id_query,
                _make_pagination_token([3, 'Caterpillar', 'Chad', None, 3])),
            [ps_2])

    def testQuery_fourFullSummaries_paginate(self):
        participant_1 = Participant(participantId=1, biobankId=4)
        self._insert(participant_1, 'Bob', 'Jones')
        participant_2 = Participant(participantId=2, biobankId=1)
        self._insert(participant_2, 'Bob', 'Jones')
        participant_3 = Participant(participantId=3, biobankId=3)
        self._insert(participant_3, 'Bob', 'Jones')
        participant_4 = Participant(participantId=4, biobankId=2)
        self._insert(participant_4, 'Bob', 'Jones')
        ps_1 = self.dao.get(1)
        ps_2 = self.dao.get(2)
        ps_3 = self.dao.get(3)
        ps_4 = self.dao.get(4)

        ps_1.lastName = 'Jones'
        ps_1.firstName = 'Bob'
        ps_1.dateOfBirth = datetime.date(1978, 10, 9)
        ps_1.hpoId = PITT_HPO_ID
        self.dao.update(ps_1)

        ps_2.lastName = 'Aardvark'
        ps_2.firstName = 'Bob'
        ps_2.dateOfBirth = datetime.date(1978, 10, 10)
        ps_2.enrollmentStatus = EnrollmentStatus.MEMBER
        self.dao.update(ps_2)

        ps_3.lastName = 'Jones'
        ps_3.firstName = 'Bob'
        ps_3.dateOfBirth = datetime.date(1978, 10, 10)
        ps_3.hpoId = PITT_HPO_ID
        ps_3.enrollmentStatus = EnrollmentStatus.MEMBER
        self.dao.update(ps_3)

        ps_4.lastName = 'Jones'
        ps_4.enrollmentStatus = EnrollmentStatus.FULL_PARTICIPANT
        self.dao.update(ps_4)

        self.assert_results(self.no_filter_query, [ps_2, ps_4],
                            _make_pagination_token(['Jones', 'Bob', None, 4]))
        self.assert_results(self.one_filter_query, [ps_1])
        self.assert_results(self.two_filter_query, [ps_1])
        self.assert_results(
            self.ascending_biobank_id_query, [ps_2, ps_4],
            _make_pagination_token([2, 'Jones', 'Bob', None, 4]))
        self.assert_results(
            self.descending_biobank_id_query, [ps_1, ps_3],
            _make_pagination_token(
                [3, 'Jones', 'Bob',
                 datetime.date(1978, 10, 10), 3]))
        self.assert_results(
            self.hpo_id_order_query, [ps_2, ps_4],
            _make_pagination_token([0, 'Jones', 'Bob', None, 4]))
        self.assert_results(
            self.enrollment_status_order_query, [ps_1, ps_2],
            _make_pagination_token(
                ['MEMBER', 'Aardvark', 'Bob',
                 datetime.date(1978, 10, 10), 2]))

        self.assert_results(
            _with_token(self.no_filter_query,
                        _make_pagination_token(['Jones', 'Bob', None, 4])),
            [ps_1, ps_3])
        self.assert_results(
            _with_token(self.ascending_biobank_id_query,
                        _make_pagination_token([2, 'Jones', 'Bob', None, 4])),
            [ps_3, ps_1])
        self.assert_results(
            _with_token(
                self.descending_biobank_id_query,
                _make_pagination_token(
                    [3, 'Jones', 'Bob',
                     datetime.date(1978, 10, 10), 3])), [ps_4, ps_2])
        self.assert_results(
            _with_token(self.hpo_id_order_query,
                        _make_pagination_token([0, 'Jones', 'Bob', None, 4])),
            [ps_1, ps_3])
        self.assert_results(
            _with_token(
                self.enrollment_status_order_query,
                _make_pagination_token([
                    'MEMBER', 'Aardvark', 'Bob',
                    datetime.date(1978, 10, 10), 2
                ])), [ps_3, ps_4])

    def test_update_from_samples(self):
        baseline_tests = ['BASELINE1', 'BASELINE2']
        config.override_setting(config.BASELINE_SAMPLE_TEST_CODES,
                                baseline_tests)
        self.dao.update_from_biobank_stored_samples()  # safe noop

        p_baseline_samples = self._insert(
            Participant(participantId=1, biobankId=11))
        p_mixed_samples = self._insert(
            Participant(participantId=2, biobankId=22))
        p_no_samples = self._insert(Participant(participantId=3, biobankId=33))
        self.assertEquals(
            self.dao.get(
                p_baseline_samples.participantId).numBaselineSamplesArrived, 0)

        sample_dao = BiobankStoredSampleDao()

        def add_sample(participant, test_code, sample_id):
            sample_dao.insert(
                BiobankStoredSample(biobankStoredSampleId=sample_id,
                                    biobankId=participant.biobankId,
                                    test=test_code))

        add_sample(p_baseline_samples, baseline_tests[0], '11111')
        add_sample(p_baseline_samples, baseline_tests[1], '22223')
        add_sample(p_mixed_samples, baseline_tests[0], '11112')
        add_sample(p_mixed_samples, 'NOT1', '44441')
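        # Only the configured baseline test codes count toward numBaselineSamplesArrived, so the
        # mixed-sample participant should end up with 1.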

        self.dao.update_from_biobank_stored_samples()
        self.assertEquals(
            self.dao.get(
                p_baseline_samples.participantId).numBaselineSamplesArrived, 2)
        self.assertEquals(
            self.dao.get(
                p_mixed_samples.participantId).numBaselineSamplesArrived, 1)
        self.assertEquals(
            self.dao.get(p_no_samples.participantId).numBaselineSamplesArrived,
            0)

    def test_calculate_enrollment_status(self):
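        # FULL_PARTICIPANT requires consent, all baseline PPI modules, completed physical measurements,
        # and a received sample; missing any of the last three yields MEMBER, and no consent yields INTERESTED.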
        self.assertEquals(
            EnrollmentStatus.FULL_PARTICIPANT,
            self.dao.calculate_enrollment_status(
                True, NUM_BASELINE_PPI_MODULES,
                PhysicalMeasurementsStatus.COMPLETED, SampleStatus.RECEIVED))
        self.assertEquals(
            EnrollmentStatus.MEMBER,
            self.dao.calculate_enrollment_status(
                True, NUM_BASELINE_PPI_MODULES - 1,
                PhysicalMeasurementsStatus.COMPLETED, SampleStatus.RECEIVED))
        self.assertEquals(
            EnrollmentStatus.MEMBER,
            self.dao.calculate_enrollment_status(
                True, NUM_BASELINE_PPI_MODULES,
                PhysicalMeasurementsStatus.UNSET, SampleStatus.RECEIVED))
        self.assertEquals(
            EnrollmentStatus.MEMBER,
            self.dao.calculate_enrollment_status(
                True, NUM_BASELINE_PPI_MODULES,
                PhysicalMeasurementsStatus.COMPLETED, SampleStatus.UNSET))
        self.assertEquals(
            EnrollmentStatus.INTERESTED,
            self.dao.calculate_enrollment_status(
                False, NUM_BASELINE_PPI_MODULES,
                PhysicalMeasurementsStatus.COMPLETED, SampleStatus.RECEIVED))
Example #16
class DvOrderApiTestBase(FlaskTestBase):
    mayolink_response = None

    def setUp(self, use_mysql=True, with_data=True):
        super(DvOrderApiTestBase, self).setUp(use_mysql=use_mysql,
                                              with_data=with_data)

        self.test_data = {
            "subject": "Patient/P123456789",
            "awardee": "PITT",
            "organization": "PITT_BANNER_HEALTH",
            "patient_status": "YES",
            "user": "******",
            "site": "hpo-site-monroeville",
            "authored": "2019-04-26T12:11:41",
            "comment": "This is comment"
        }

        self.participant_dao = ParticipantDao()
        self.summary_dao = ParticipantSummaryDao()
        self.hpo_dao = HPODao()

        self.hpo = self.hpo_dao.get_by_name('PITT')

        self.participant = Participant(hpoId=self.hpo.hpoId,
                                       participantId=123456789,
                                       biobankId=7)
        self.participant_dao.insert(self.participant)
        self.summary = self.participant_summary(self.participant)
        self.summary_dao.insert(self.summary)

    def test_patient_status_created(self):
        data = copy.copy(self.test_data)

        # insert patient status
        url = os.path.join('PatientStatus',
                           'P{0}'.format(self.participant.participantId),
                           'Organization', 'PITT_BANNER_HEALTH')
        resp = self.send_post(url, data, expected_status=httplib.CREATED)

        # test that our test_data dict is in the resp.response dict.
        resp_data = json.loads(resp.response[0])
        self.assertDictContainsSubset(data, resp_data)

        # Attempting to insert again should fail as a duplicate (conflict).
        self.send_post(url, data, expected_status=httplib.CONFLICT)

        # Get record and test that our test_data dict is in the resp.response dict.
        resp = self.send_get(url)
        self.assertDictContainsSubset(data, resp)

    def test_patient_status_updated(self):
        data = copy.copy(self.test_data)

        # insert patient status
        url = os.path.join('PatientStatus',
                           'P{0}'.format(self.participant.participantId),
                           'Organization', 'PITT_BANNER_HEALTH')
        resp = self.send_post(url, data, expected_status=httplib.CREATED)

        data['authored'] = '2019-04-27T16:32:01'
        data['comment'] = 'saw patient at new site'
        data['site'] = 'hpo-site-bannerphoenix'

        resp = self.send_put(url, data, expected_status=httplib.OK)
        self.assertDictContainsSubset(data, resp)

        # Get record and test that our test_data dict is in the resp.response dict.
        resp = self.send_get(url)
        self.assertDictContainsSubset(data, resp)
Example #17
class ParticipantDaoTest(SqlTestBase):
    def setUp(self):
        super(ParticipantDaoTest, self).setUp()
        self.dao = ParticipantDao()
        self.participant_summary_dao = ParticipantSummaryDao()
        self.participant_history_dao = ParticipantHistoryDao()

    def test_get_before_insert(self):
        self.assertIsNone(self.dao.get(1))
        self.assertIsNone(self.participant_summary_dao.get(1))
        self.assertIsNone(self.participant_history_dao.get([1, 1]))

    def test_insert(self):
        p = Participant()
        time = datetime.datetime(2016, 1, 1)
        with random_ids([1, 2]):
            with FakeClock(time):
                self.dao.insert(p)
        expected_participant = self._participant_with_defaults(
            participantId=1,
            version=1,
            biobankId=2,
            lastModified=time,
            signUpTime=time)
        self.assertEquals(expected_participant.asdict(), p.asdict())

        p2 = self.dao.get(1)
        self.assertEquals(p.asdict(), p2.asdict())

        # Creating a participant also creates a ParticipantHistory row, but not a ParticipantSummary row
        ps = self.participant_summary_dao.get(1)
        self.assertIsNone(ps)
        ph = self.participant_history_dao.get([1, 1])
        expected_ph = self._participant_history_with_defaults(
            participantId=1, biobankId=2, lastModified=time, signUpTime=time)
        self.assertEquals(expected_ph.asdict(), ph.asdict())

    def test_insert_with_external_id(self):
        p = Participant(externalId=3)
        time = datetime.datetime(2016, 1, 1)
        with random_ids([1, 2]):
            with FakeClock(time):
                self.dao.insert(p)
        expected_participant = self._participant_with_defaults(
            participantId=1,
            externalId=3,
            version=1,
            biobankId=2,
            lastModified=time,
            signUpTime=time)
        self.assertEquals(expected_participant.asdict(), p.asdict())

        p2 = self.dao.get(1)
        self.assertEquals(p.asdict(), p2.asdict())

        # Creating a participant also creates a ParticipantHistory row, but not a ParticipantSummary row
        ps = self.participant_summary_dao.get(1)
        self.assertIsNone(ps)
        ph = self.participant_history_dao.get([1, 1])
        expected_ph = self._participant_history_with_defaults(
            participantId=1,
            externalId=3,
            biobankId=2,
            lastModified=time,
            signUpTime=time)
        self.assertEquals(expected_ph.asdict(), ph.asdict())

    def test_insert_duplicate_participant_id_retry(self):
        p = Participant()
        with random_ids([1, 2]):
            self.dao.insert(p)
        p2 = Participant()
        time = datetime.datetime(2016, 1, 1)
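        # The first candidate pair (participantId=1, biobankId=3) collides with the existing participant,
        # so the DAO should retry with the next pair (2, 3).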
        with random_ids([1, 3, 2, 3]):
            with FakeClock(time):
                p2 = self.dao.insert(p2)
        expected_participant = self._participant_with_defaults(
            participantId=2,
            version=1,
            biobankId=3,
            lastModified=time,
            signUpTime=time)
        self.assertEquals(expected_participant.asdict(), p2.asdict())

    def test_insert_duplicate_participant_id_give_up(self):
        p = Participant()
        with random_ids([1, 2]):
            self.dao.insert(p)
        rand_ints = []
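        # Every candidate pair reuses participantId 1, which already exists, so the DAO exhausts
        # MAX_INSERT_ATTEMPTS and raises ServiceUnavailable.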
        for i in range(0, MAX_INSERT_ATTEMPTS):
            rand_ints.append(1)
            rand_ints.append(i)
        p2 = Participant()
        with random_ids(rand_ints):
            with self.assertRaises(ServiceUnavailable):
                self.dao.insert(p2)

    def test_insert_duplicate_biobank_id_give_up(self):
        p = Participant()
        with random_ids([1, 2]):
            self.dao.insert(p)
        rand_ints = []
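        # Every candidate pair reuses biobankId 2, which already exists, so insertion likewise gives up
        # with ServiceUnavailable after MAX_INSERT_ATTEMPTS.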
        for i in range(0, MAX_INSERT_ATTEMPTS):
            rand_ints.append(i + 2)
            rand_ints.append(2)
        p2 = Participant()
        with random_ids(rand_ints):
            with self.assertRaises(ServiceUnavailable):
                self.dao.insert(p2)

    def test_update_no_expected_version_no_ps(self):
        p = Participant()
        time = datetime.datetime(2016, 1, 1)
        with random_ids([1, 2]):
            with FakeClock(time):
                self.dao.insert(p)

        p.providerLink = make_primary_provider_link_for_name('PITT')
        time2 = datetime.datetime(2016, 1, 2)
        with FakeClock(time2):
            self.dao.update(p)
        # lastModified, hpoId, and version are updated on p after it is passed in
        p2 = self.dao.get(1)
        expected_participant = self._participant_with_defaults(
            participantId=1,
            version=2,
            biobankId=2,
            lastModified=time2,
            signUpTime=time,
            hpoId=PITT_HPO_ID,
            providerLink=p2.providerLink)
        self.assertEquals(expected_participant.asdict(), p2.asdict())
        self.assertEquals(p.asdict(), p2.asdict())

        ps = self.participant_summary_dao.get(1)
        self.assertIsNone(ps)

        expected_ph = self._participant_history_with_defaults(
            participantId=1, biobankId=2, lastModified=time, signUpTime=time)
        # Updating the participant adds a new ParticipantHistory row.
        ph = self.participant_history_dao.get([1, 1])
        self.assertEquals(expected_ph.asdict(), ph.asdict())
        ph2 = self.participant_history_dao.get([1, 2])
        expected_ph2 = self._participant_history_with_defaults(
            participantId=1,
            version=2,
            biobankId=2,
            lastModified=time2,
            signUpTime=time,
            hpoId=PITT_HPO_ID,
            providerLink=p2.providerLink)
        self.assertEquals(expected_ph2.asdict(), ph2.asdict())

    def test_update_no_expected_version_with_ps(self):
        p = Participant()
        time = datetime.datetime(2016, 1, 1)
        with random_ids([1, 2]):
            with FakeClock(time):
                self.dao.insert(p)
        p.providerLink = make_primary_provider_link_for_name('PITT')
        time2 = datetime.datetime(2016, 1, 2)
        with FakeClock(time2):
            self.dao.update(p)

        summary = self.participant_summary(p)
        self.participant_summary_dao.insert(summary)

        # lastModified, hpoId, and version are updated on p after it is passed in
        p2 = self.dao.get(1)
        expected_participant = self._participant_with_defaults(
            participantId=1,
            version=2,
            biobankId=2,
            lastModified=time2,
            signUpTime=time,
            hpoId=PITT_HPO_ID,
            providerLink=p2.providerLink)
        self.assertEquals(expected_participant.asdict(), p2.asdict())
        self.assertEquals(p.asdict(), p2.asdict())

        # Updating the participant provider link also updates the HPO ID on the participant summary.
        ps = self.participant_summary_dao.get(1)
        expected_ps = self._participant_summary_with_defaults(
            participantId=1,
            biobankId=2,
            signUpTime=time,
            hpoId=PITT_HPO_ID,
            lastModified=time2,
            firstName=summary.firstName,
            lastName=summary.lastName,
            email=summary.email)
        self.assertEquals(expected_ps.asdict(), ps.asdict())

        p2_last_modified = p2.lastModified
        p2.hpoId = 2
        self.dao.update(p2)
        p2_update = self.dao.get(1)
        self.assertNotEquals(p2_last_modified, p2_update.lastModified)
        self.assertEquals(p2_update.lastModified, p2.lastModified)

        expected_ph = self._participant_history_with_defaults(
            participantId=1, biobankId=2, lastModified=time, signUpTime=time)
        # And updating the participant adds a new ParticipantHistory row.
        ph = self.participant_history_dao.get([1, 1])
        self.assertEquals(expected_ph.asdict(), ph.asdict())
        ph2 = self.participant_history_dao.get([1, 2])
        expected_ph2 = self._participant_history_with_defaults(
            participantId=1,
            version=2,
            biobankId=2,
            lastModified=time2,
            signUpTime=time,
            hpoId=PITT_HPO_ID,
            providerLink=p2.providerLink)
        self.assertEquals(expected_ph2.asdict(), ph2.asdict())

    def test_update_right_expected_version(self):
        p = Participant()
        time = datetime.datetime(2016, 1, 1)
        with random_ids([1, 2]):
            with FakeClock(time):
                self.dao.insert(p)
        p.version = 1
        p.providerLink = make_primary_provider_link_for_name('PITT')
        time2 = datetime.datetime(2016, 1, 2)
        with FakeClock(time2):
            self.dao.update(p)

        p2 = self.dao.get(1)
        expected_participant = self._participant_with_defaults(
            participantId=1,
            version=2,
            biobankId=2,
            lastModified=time2,
            signUpTime=time,
            hpoId=PITT_HPO_ID,
            providerLink=p2.providerLink)
        self.assertEquals(expected_participant.asdict(), p2.asdict())

    def test_update_withdraw(self):
        p = Participant()
        time = datetime.datetime(2016, 1, 1)
        with random_ids([1, 2]):
            with FakeClock(time):
                self.dao.insert(p)
        p.version = 1
        p.withdrawalStatus = WithdrawalStatus.NO_USE
        time2 = datetime.datetime(2016, 1, 2)
        with FakeClock(time2):
            self.dao.update(p)

        p2 = self.dao.get(1)
        expected_participant = self._participant_with_defaults(
            participantId=1,
            version=2,
            biobankId=2,
            lastModified=time2,
            signUpTime=time,
            withdrawalStatus=WithdrawalStatus.NO_USE,
            withdrawalTime=time2)
        self.assertEquals(expected_participant.asdict(), p2.asdict())

        p.version = 2
        p.providerLink = make_primary_provider_link_for_name('PITT')
        p.withdrawalTime = None
        time3 = datetime.datetime(2016, 1, 3)
        with FakeClock(time3):
            self.dao.update(p)

        # Withdrawal time should get copied over.
        p2 = self.dao.get(1)
        expected_participant = self._participant_with_defaults(
            participantId=1,
            version=3,
            biobankId=2,
            lastModified=time3,
            signUpTime=time,
            withdrawalStatus=WithdrawalStatus.NO_USE,
            withdrawalTime=time2,
            hpoId=PITT_HPO_ID,
            providerLink=p2.providerLink)
        self.assertEquals(expected_participant.asdict(), p2.asdict())

    def test_update_suspend(self):
        p = Participant()
        time = datetime.datetime(2016, 1, 1)
        with random_ids([1, 2]):
            with FakeClock(time):
                self.dao.insert(p)
        p.version = 1
        p.suspensionStatus = SuspensionStatus.NO_CONTACT
        time2 = datetime.datetime(2016, 1, 2)
        with FakeClock(time2):
            self.dao.update(p)

        p2 = self.dao.get(1)
        expected_participant = self._participant_with_defaults(
            participantId=1,
            version=2,
            biobankId=2,
            lastModified=time2,
            signUpTime=time,
            suspensionStatus=SuspensionStatus.NO_CONTACT,
            suspensionTime=time2)
        self.assertEquals(expected_participant.asdict(), p2.asdict())

        p.version = 2
        p.providerLink = make_primary_provider_link_for_name('PITT')
        p.suspensionTime = None
        time3 = datetime.datetime(2016, 1, 3)
        with FakeClock(time3):
            self.dao.update(p)

        # Suspension time should get copied over.
        p2 = self.dao.get(1)
        expected_participant = self._participant_with_defaults(
            participantId=1,
            version=3,
            biobankId=2,
            lastModified=time3,
            signUpTime=time,
            suspensionStatus=SuspensionStatus.NO_CONTACT,
            suspensionTime=time2,
            hpoId=PITT_HPO_ID,
            providerLink=p2.providerLink)
        self.assertEquals(expected_participant.asdict(), p2.asdict())

    def test_update_wrong_expected_version(self):
        p = Participant()
        time = datetime.datetime(2016, 1, 1)
        with random_ids([1, 2]):
            with FakeClock(time):
                self.dao.insert(p)

        p.version = 2
        p.providerLink = make_primary_provider_link_for_name('PITT')
        time2 = datetime.datetime(2016, 1, 2)
        with FakeClock(time2):
            with self.assertRaises(PreconditionFailed):
                self.dao.update(p)

    def test_update_withdrawn_hpo_succeeds(self):
        p = Participant(withdrawalStatus=WithdrawalStatus.NO_USE)
        time = datetime.datetime(2016, 1, 1)
        with random_ids([1, 2]):
            with FakeClock(time):
                self.dao.insert(p)

        expected_participant = self._participant_with_defaults(
            participantId=1,
            version=1,
            biobankId=2,
            lastModified=time,
            signUpTime=time,
            withdrawalStatus=WithdrawalStatus.NO_USE)
        self.assertEquals(expected_participant.asdict(), p.asdict())

        p2 = self.dao.get(1)
        self.assertEquals(p.asdict(), p2.asdict())

        p.version = 1
        p.providerLink = make_primary_provider_link_for_name('PITT')
        self.dao.update(p)

    def test_update_withdrawn_status_fails(self):
        p = Participant(withdrawalStatus=WithdrawalStatus.NO_USE)
        time = datetime.datetime(2016, 1, 1)
        with random_ids([1, 2]):
            with FakeClock(time):
                self.dao.insert(p)

        expected_participant = self._participant_with_defaults(
            participantId=1,
            version=1,
            biobankId=2,
            lastModified=time,
            signUpTime=time,
            withdrawalStatus=WithdrawalStatus.NO_USE)
        self.assertEquals(expected_participant.asdict(), p.asdict())

        p2 = self.dao.get(1)
        self.assertEquals(p.asdict(), p2.asdict())

        p.version = 1
        p.withdrawalStatus = WithdrawalStatus.NOT_WITHDRAWN
        with self.assertRaises(Forbidden):
            self.dao.update(p)

    def test_update_not_exists(self):
        p = self._participant_with_defaults(participantId=1, biobankId=2)
        with self.assertRaises(NotFound):
            self.dao.update(p)

    def test_bad_hpo_insert(self):
        p = Participant(
            participantId=1,
            version=1,
            biobankId=2,
            providerLink=make_primary_provider_link_for_name('FOO'))
        with self.assertRaises(BadRequest):
            self.dao.insert(p)

    def test_bad_hpo_update(self):
        p = Participant(participantId=1, biobankId=2)
        time = datetime.datetime(2016, 1, 1)
        with FakeClock(time):
            self.dao.insert(p)

        p.providerLink = make_primary_provider_link_for_name('FOO')
        with self.assertRaises(BadRequest):
            self.dao.update(p)

    def test_pairs_unset(self):
        participant_id = 22
        self.dao.insert(Participant(participantId=participant_id, biobankId=2))
        refetched = self.dao.get(participant_id)
        self.assertEquals(refetched.hpoId, UNSET_HPO_ID)  # sanity check
        self.participant_summary_dao.insert(
            self.participant_summary(refetched))

        with self.dao.session() as session:
            self.dao.add_missing_hpo_from_site(session, participant_id,
                                               self._test_db.site_id)

        paired = self.dao.get(participant_id)
        self.assertEquals(paired.hpoId, self._test_db.hpo_id)
        self.assertEquals(
            paired.providerLink,
            make_primary_provider_link_for_id(self._test_db.hpo_id))
        self.assertEquals(
            self.participant_summary_dao.get(participant_id).hpoId,
            self._test_db.hpo_id)
        self.assertEquals(paired.organizationId, self._test_db.organization_id)
        self.assertEquals(paired.siteId, self._test_db.site_id)

    def test_overwrite_existing_pairing(self):
        participant_id = 99
        created = self.dao.insert(
            Participant(participantId=participant_id,
                        biobankId=2,
                        hpoId=self._test_db.hpo_id,
                        providerLink=make_primary_provider_link_for_id(
                            self._test_db.hpo_id)))
        self.participant_summary_dao.insert(self.participant_summary(created))
        self.assertEquals(created.hpoId, self._test_db.hpo_id)  # sanity check

        other_hpo = HPODao().insert(
            HPO(hpoId=PITT_HPO_ID + 1, name='DIFFERENT_HPO'))
        other_site = SiteDao().insert(
            Site(hpoId=other_hpo.hpoId,
                 siteName='Arbitrary Site',
                 googleGroup='*****@*****.**'))

        with self.dao.session() as session:
            self.dao.add_missing_hpo_from_site(session, participant_id,
                                               other_site.siteId)

        # Original Participant + summary is affected.
        refetched = self.dao.get(participant_id)

        self.assertEquals(refetched.hpoId, other_hpo.hpoId)
        self.assertEquals(refetched.providerLink,
                          make_primary_provider_link_for_id(other_hpo.hpoId))
        self.assertEquals(
            self.participant_summary_dao.get(participant_id).hpoId,
            other_hpo.hpoId)

    def test_pairing_at_different_levels(self):
        p = Participant()
        time = datetime.datetime(2016, 1, 1)
        with random_ids([1, 2]):
            with FakeClock(time):
                self.dao.insert(p)

        p.version = 1
        p.siteId = 1
        time2 = datetime.datetime(2016, 1, 2)
        with FakeClock(time2):
            self.dao.update(p)

        p2 = self.dao.get(1)
        ep = self._participant_with_defaults(participantId=1,
                                             version=2,
                                             biobankId=2,
                                             lastModified=time2,
                                             signUpTime=time,
                                             hpoId=PITT_HPO_ID,
                                             siteId=1,
                                             organizationId=PITT_ORG_ID,
                                             providerLink=p2.providerLink)
        self.assertEquals(ep.siteId, p2.siteId)
        # Ensure that p2 gets paired with the expected awardee and organization from update().
        self.assertEquals(ep.hpoId, p2.hpoId)
        self.assertEquals(ep.organizationId, p2.organizationId)
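

# The update tests above all exercise the same guards in ParticipantDao.update:
# a missing row raises NotFound, a withdrawn participant cannot be un-withdrawn
# (Forbidden), and a stale client version raises PreconditionFailed. Below is a
# minimal sketch of that check, assuming the DAO compares the stored row with
# the incoming object; the helper name and messages are illustrative only, not
# the production DAO code.
def _sketch_validate_participant_update(existing, incoming):
    if existing is None:
        raise NotFound('participant %r does not exist' % incoming.participantId)
    if (existing.withdrawalStatus == WithdrawalStatus.NO_USE
            and incoming.withdrawalStatus != WithdrawalStatus.NO_USE):
        raise Forbidden('withdrawal of participant %r cannot be reversed' %
                        incoming.participantId)
    if incoming.version != existing.version:
        raise PreconditionFailed('expected version %s, found %s' %
                                 (incoming.version, existing.version))
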
class SyncConsentFilesTest(CloudStorageSqlTestBase, NdbTestBase):
    """Tests behavior of sync_consent_files."""
    def setUp(self, **kwargs):
        super(SyncConsentFilesTest, self).setUp(use_mysql=True, **kwargs)
        NdbTestBase.doSetUp(self)
        TestBase.setup_fake(self)
        self.org_dao = OrganizationDao()
        self.site_dao = SiteDao()
        self.participant_dao = ParticipantDao()
        self.summary_dao = ParticipantSummaryDao()

    def tearDown(self):
        super(SyncConsentFilesTest, self).tearDown()

    def _create_org(self, id_):
        org = Organization(organizationId=id_,
                           externalId=id_,
                           displayName=id_,
                           hpoId=UNSET_HPO_ID)
        self.org_dao.insert(org)
        return org

    def _create_site(self, id_, google_group):
        site = Site(siteId=id_, siteName=id_, googleGroup=google_group)
        self.site_dao.insert(site)
        return site

    def _create_participant(self,
                            id_,
                            org_id,
                            site_id,
                            consents=False,
                            ghost=None,
                            email=None,
                            null_email=False):
        participant = Participant(participantId=id_,
                                  biobankId=id_,
                                  organizationId=org_id,
                                  siteId=site_id,
                                  isGhostId=ghost)
        self.participant_dao.insert(participant)
        summary = self.participant_summary(participant)
        if consents:
            summary.consentForElectronicHealthRecords = 1
            summary.consentForStudyEnrollment = 1
        if email:
            summary.email = email
        if null_email:
            summary.email = None
        self.summary_dao.insert(summary)
        return participant

    def test_iter_participants_data(self):
        """should list consenting participants
    """
        org1 = self._create_org(1)
        org2 = self._create_org(2)
        site1 = self._create_site(1001, 'group1')
        site2 = self._create_site(1002, 'group2')
        self._create_participant(1,
                                 org1.organizationId,
                                 site1.siteId,
                                 consents=True,
                                 null_email=True)
        self._create_participant(2, org2.organizationId, site2.siteId)
        self._create_participant(3,
                                 org1.organizationId,
                                 None,
                                 consents=True,
                                 ghost=False)
        self._create_participant(4,
                                 org1.organizationId,
                                 None,
                                 consents=True,
                                 ghost=True)
        self._create_participant(5,
                                 org1.organizationId,
                                 None,
                                 consents=True,
                                 email='*****@*****.**')
        participant_data_list = list(
            sync_consent_files._iter_participants_data())
        participant_ids = [d.participant_id for d in participant_data_list]
        self.assertEqual(len(participant_ids), 2,
                         "finds correct number of results")
        self.assertEqual(participant_ids, [1, 3], "finds valid participants")
        self.assertEqual(participant_data_list[0].google_group, 'group1',
                         "Includes google group")
        self.assertEqual(participant_data_list[1].google_group, None,
                         "allows None for google group")

    @mock.patch('cloudstorage.listbucket')
    @mock.patch('cloudstorage.copy2')
    def test_cloudstorage_copy_objects_api_calls(self, mock_copy2,
                                                 mock_listbucket):
        """Makes the proper google cloudstorage API calls
    """
        mock_listbucket.return_value = [
            cloudstorage.common.GCSFileStat('/fake_bucket1/prefix1/foo', 0,
                                            'x', 0),
            cloudstorage.common.GCSFileStat('/fake_bucket1/prefix1/bar', 0,
                                            'x', 0),
        ]
        # with trailing slashes
        sync_consent_files.cloudstorage_copy_objects('/fake_bucket1/prefix1/',
                                                     '/fake_bucket2/prefix2/')
        mock_copy2.assert_has_calls([
            mock.call('/fake_bucket1/prefix1/foo',
                      '/fake_bucket2/prefix2/foo'),
            mock.call('/fake_bucket1/prefix1/bar',
                      '/fake_bucket2/prefix2/bar'),
        ])
        # without trailing slashes
        sync_consent_files.cloudstorage_copy_objects('/fake_bucket1/prefix1',
                                                     '/fake_bucket2/prefix2')
        mock_copy2.assert_has_calls([
            mock.call('/fake_bucket1/prefix1/foo',
                      '/fake_bucket2/prefix2/foo'),
            mock.call('/fake_bucket1/prefix1/bar',
                      '/fake_bucket2/prefix2/bar'),
        ])

    @staticmethod
    def _write_cloud_object(cloud_filename, contents_str):
        with cloudstorage.cloudstorage_api.open(cloud_filename,
                                                mode='w') as cloud_file:
            cloud_file.write(contents_str.encode('utf-8'))

    def test_cloudstorage_copy_objects_actual(self):
        self._write_cloud_object('/fake_bucket1/prefix/x1/foo.txt', 'foo')
        self._write_cloud_object('/fake_bucket1/prefix/x1/bar.txt', 'bar')
        self._write_cloud_object('/fake_bucket1/prefix/x1/y1/foo.txt', 'foo')
        with cloudstorage.cloudstorage_api.open(
                '/fake_bucket1/prefix/x1/foo.txt', mode='r') as f:
            self.assertEqual(f.read(), 'foo', 'Wrote to cloud storage')
        sync_consent_files.cloudstorage_copy_objects(
            '/fake_bucket1/prefix/x1/', '/fake_bucket2/prefix/z/x1/')
        self.assertEqual([
            file_stat.filename
            for file_stat in cloudstorage.cloudstorage_api.listbucket(
                '/fake_bucket2/prefix/z/x1/')
        ], [
            '/fake_bucket2/prefix/z/x1/bar.txt',
            '/fake_bucket2/prefix/z/x1/foo.txt',
            '/fake_bucket2/prefix/z/x1/y1/foo.txt',
        ], "copied all objects")
        with cloudstorage.cloudstorage_api.open(
                '/fake_bucket2/prefix/z/x1/foo.txt', mode='r') as f:
            self.assertEqual(f.read(), 'foo', 'copied contents')

    @mock.patch('cloudstorage.copy2')
    def test_cloudstorage_copy_objects_only_new_and_changed(self, copy2):
        self._write_cloud_object('/fake_bucket1/prefix/x1/foo.txt', 'foo')
        self._write_cloud_object('/fake_bucket1/prefix/x1/bar.txt', 'bar')
        self._write_cloud_object('/fake_bucket2/prefix/z/x1/foo.txt', 'foo')
        self._write_cloud_object('/fake_bucket2/prefix/z/x1/bar.txt', 'baz')
        sync_consent_files.cloudstorage_copy_objects(
            '/fake_bucket1/prefix/x1/', '/fake_bucket2/prefix/z/x1/')
        copy2.assert_called_once_with('/fake_bucket1/prefix/x1/bar.txt',
                                      '/fake_bucket2/prefix/z/x1/bar.txt')
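
# A hedged sketch of the copy behavior the three cloudstorage tests above
# exercise: sync_consent_files.cloudstorage_copy_objects appears to normalize
# trailing slashes, walk the source prefix, and copy an object only when the
# destination is missing or differs. The function below is illustrative only
# (it assumes an etag comparison), not the project's implementation.
def _sketch_cloudstorage_copy_objects(source, destination):
    if not source.endswith('/'):
        source += '/'
    if not destination.endswith('/'):
        destination += '/'
    for source_stat in cloudstorage.listbucket(source):
        destination_name = destination + source_stat.filename[len(source):]
        try:
            if cloudstorage.stat(destination_name).etag == source_stat.etag:
                continue  # destination already matches the source; skip it
        except cloudstorage.NotFoundError:
            pass  # destination missing; fall through and copy
        cloudstorage.copy2(source_stat.filename, destination_name)
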
Exemple #19
class BiobankOrderApiTest(FlaskTestBase):
    def setUp(self):
        super(BiobankOrderApiTest, self).setUp(use_mysql=True)
        self.participant = Participant(participantId=123, biobankId=555)
        self.participant_dao = ParticipantDao()
        self.participant_dao.insert(self.participant)
        self.summary_dao = ParticipantSummaryDao()
        self.bio_dao = BiobankOrderDao()
        self.path = ('Participant/%s/BiobankOrder' %
                     to_client_participant_id(self.participant.participantId))

    def test_cancel_order(self):
        self.summary_dao.insert(self.participant_summary(self.participant))
        order_json = load_biobank_order_json(self.participant.participantId,
                                             filename='biobank_order_2.json')
        result = self.send_post(self.path, order_json)
        full_order_json = load_biobank_order_json(
            self.participant.participantId, filename='biobank_order_1.json')
        _strip_fields(result)
        _strip_fields(full_order_json)
        self.assertEquals(full_order_json, result)

        biobank_order_id = result['identifier'][1]['value']
        path = self.path + '/' + biobank_order_id
        request_data = {
            "amendedReason": "Its all wrong",
            "cancelledInfo": {
                "author": {
                    "system": "https://www.pmi-ops.org/healthpro-username",
                    "value": "*****@*****.**"
                },
                "site": {
                    "system": "https://www.pmi-ops.org/site-id",
                    "value": "hpo-site-monroeville"
                }
            },
            "status": "cancelled"
        }
        cancelled_order = self.send_patch(path,
                                          request_data=request_data,
                                          headers={'If-Match': 'W/"1"'})
        get_cancelled_order = self.send_get(path)
        get_summary = self.summary_dao.get(self.participant.participantId)

        self.assertEqual(get_summary.biospecimenSourceSiteId, None)
        self.assertEqual(get_summary.biospecimenCollectedSiteId, None)
        self.assertEqual(get_summary.biospecimenOrderTime, None)
        self.assertEqual(get_summary.biospecimenStatus, None)
        self.assertEqual(get_summary.biospecimenFinalizedSiteId, None)
        self.assertEqual(get_summary.biospecimenProcessedSiteId, None)
        self.assertEqual(get_summary.sampleOrderStatus2ED10, None)
        self.assertEqual(get_summary.sampleOrderStatus2ED10Time, None)
        self.assertEqual(get_summary.sampleStatus2ED10, None)
        self.assertEqual(get_summary.sampleStatus2ED10Time, None)
        self.assertEqual(get_summary.sampleOrderStatus1PST8, None)
        self.assertEqual(get_summary.sampleOrderStatus1PST8Time, None)
        self.assertEqual(get_summary.sampleStatus1PST8, None)
        self.assertEqual(get_summary.sampleStatus1PST8Time, None)
        self.assertEqual(get_summary.sampleOrderStatus1PS08, None)
        self.assertEqual(get_summary.sampleOrderStatus1PS08Time, None)
        self.assertEqual(get_summary.sampleStatus1PS08, None)
        self.assertEqual(get_summary.sampleStatus1PS08Time, None)
        self.assertEqual(get_summary.sampleOrderStatus2PST8, None)
        self.assertEqual(get_summary.sampleOrderStatus2PST8Time, None)
        self.assertEqual(get_summary.sampleStatus2PST8, None)
        self.assertEqual(get_summary.sampleStatus2PST8Time, None)
        self.assertEqual(get_summary.sampleOrderStatus1PXR2, None)
        self.assertEqual(get_summary.sampleOrderStatus1PXR2Time, None)
        self.assertEqual(get_summary.sampleStatus1PXR2, None)
        self.assertEqual(get_summary.sampleStatus1PXR2Time, None)
        self.assertEqual(get_summary.sampleOrderStatus1CFD9, None)
        self.assertEqual(get_summary.sampleOrderStatus1CFD9Time, None)
        self.assertEqual(get_summary.sampleStatus1CFD9, None)
        self.assertEqual(get_summary.sampleStatus1CFD9Time, None)
        self.assertEqual(get_summary.sampleOrderStatus1ED02, None)
        self.assertEqual(get_summary.sampleOrderStatus1ED02Time, None)
        self.assertEqual(get_summary.sampleStatus1ED02, None)
        self.assertEqual(get_summary.sampleStatus1ED02Time, None)
        self.assertEqual(cancelled_order, get_cancelled_order)
        self.assertEqual(get_cancelled_order['status'], 'CANCELLED')
        self.assertEqual(get_cancelled_order['amendedReason'], 'Its all wrong')
        self.assertEqual(
            get_cancelled_order['cancelledInfo']['author']['value'],
            '*****@*****.**')
        self.assertEqual(get_cancelled_order['cancelledInfo']['site']['value'],
                         'hpo-site-monroeville')

    def test_cancel_one_order_with_another_good_order(self):
        self.summary_dao.insert(self.participant_summary(self.participant))
        order_json = load_biobank_order_json(self.participant.participantId,
                                             filename="biobank_order_1.json")
        order_json2 = load_biobank_order_json(self.participant.participantId,
                                              filename="biobank_order_2.json")
        order_json2['identifier'][0]['value'] = 'healthpro-order-id-1231234'
        order_json2['identifier'][1]['value'] = 'WEB1YLHV1234'
        result = self.send_post(self.path, order_json)
        self.send_post(self.path, order_json2)

        biobank_order_id = result["identifier"][1]["value"]
        path = self.path + "/" + biobank_order_id
        request_data = {
            "amendedReason": "Its all wrong",
            "cancelledInfo": {
                "author": {
                    "system": "https://www.pmi-ops.org/healthpro-username",
                    "value": "*****@*****.**"
                },
                "site": {
                    "system": "https://www.pmi-ops.org/site-id",
                    "value": "hpo-site-monroeville"
                },
            },
            "status": "cancelled",
        }
        self.send_patch(path,
                        request_data=request_data,
                        headers={"If-Match": 'W/"1"'})

        get_summary = self.summary_dao.get(self.participant.participantId)

        self.assertEqual(get_summary.biospecimenSourceSiteId, 1)
        self.assertEqual(get_summary.biospecimenCollectedSiteId, 1)
        self.assertEqual(get_summary.biospecimenFinalizedSiteId, 2)

    def test_you_can_not_cancel_a_cancelled_order(self):
        self.summary_dao.insert(self.participant_summary(self.participant))
        order_json = load_biobank_order_json(self.participant.participantId,
                                             filename='biobank_order_2.json')
        result = self.send_post(self.path, order_json)

        biobank_order_id = result['identifier'][1]['value']
        path = self.path + '/' + biobank_order_id
        request_data = {
            "amendedReason": "Its all wrong",
            "cancelledInfo": {
                "author": {
                    "system": "https://www.pmi-ops.org/healthpro-username",
                    "value": "*****@*****.**"
                },
                "site": {
                    "system": "https://www.pmi-ops.org/site-id",
                    "value": "hpo-site-monroeville"
                }
            },
            "status": "cancelled"
        }
        self.send_patch(path,
                        request_data=request_data,
                        headers={'If-Match': 'W/"1"'})

        self.send_patch(path,
                        request_data=request_data,
                        headers={'If-Match': 'W/"2"'},
                        expected_status=httplib.BAD_REQUEST)

    def test_you_can_not_restore_a_not_cancelled_order(self):
        self.summary_dao.insert(self.participant_summary(self.participant))
        order_json = load_biobank_order_json(self.participant.participantId,
                                             filename='biobank_order_2.json')
        result = self.send_post(self.path, order_json)

        biobank_order_id = result['identifier'][1]['value']
        path = self.path + '/' + biobank_order_id
        request_data = {
            "amendedReason": "Its all wrong",
            "restoredInfo": {
                "author": {
                    "system": "https://www.pmi-ops.org/healthpro-username",
                    "value": "*****@*****.**"
                },
                "site": {
                    "system": "https://www.pmi-ops.org/site-id",
                    "value": "hpo-site-monroeville"
                }
            },
            "status": "restored"
        }
        self.send_patch(path,
                        request_data=request_data,
                        headers={'If-Match': 'W/"1"'},
                        expected_status=httplib.BAD_REQUEST)

    def test_restore_an_order(self):
        self.summary_dao.insert(self.participant_summary(self.participant))
        order_json = load_biobank_order_json(self.participant.participantId,
                                             filename='biobank_order_2.json')
        result = self.send_post(self.path, order_json)
        full_order_json = load_biobank_order_json(
            self.participant.participantId, filename='biobank_order_1.json')
        _strip_fields(result)
        _strip_fields(full_order_json)
        self.assertEquals(full_order_json, result)

        biobank_order_id = result['identifier'][1]['value']
        path = self.path + '/' + biobank_order_id
        request_data = {
            "amendedReason": "Its all wrong",
            "cancelledInfo": {
                "author": {
                    "system": "https://www.pmi-ops.org/healthpro-username",
                    "value": "*****@*****.**"
                },
                "site": {
                    "system": "https://www.pmi-ops.org/site-id",
                    "value": "hpo-site-monroeville"
                }
            },
            "status": "cancelled"
        }
        self.send_patch(path,
                        request_data=request_data,
                        headers={'If-Match': 'W/"1"'})

        request_data = {
            "amendedReason": "I didnt mean to cancel",
            "restoredInfo": {
                "author": {
                    "system": "https://www.pmi-ops.org/healthpro-username",
                    "value": "*****@*****.**"
                },
                "site": {
                    "system": "https://www.pmi-ops.org/site-id",
                    "value": "hpo-site-monroeville"
                }
            },
            "status": "restored"
        }

        self.send_patch(path,
                        request_data=request_data,
                        headers={'If-Match': 'W/"2"'})
        restored_order = self.send_get(path)
        get_summary = self.summary_dao.get(self.participant.participantId)
        self.assertEqual(get_summary.sampleOrderStatus1SST8,
                         OrderStatus.CREATED)
        self.assertEqual(get_summary.sampleOrderStatus2ED10,
                         OrderStatus.CREATED)
        self.assertEqual(get_summary.sampleOrderStatus1SAL,
                         OrderStatus.CREATED)
        self.assertEqual(get_summary.sampleOrderStatus1UR10,
                         OrderStatus.CREATED)
        self.assertEqual(get_summary.sampleOrderStatus1CFD9,
                         OrderStatus.FINALIZED)
        self.assertEqual(get_summary.sampleOrderStatus1ED02,
                         OrderStatus.FINALIZED)
        self.assertEqual(get_summary.sampleOrderStatus2SST8,
                         OrderStatus.FINALIZED)
        self.assertEqual(get_summary.sampleOrderStatus2PST8,
                         OrderStatus.FINALIZED)
        self.assertEqual(restored_order['status'], 'UNSET')
        self.assertEqual(restored_order['restoredInfo']['author']['value'],
                         '*****@*****.**')
        self.assertEqual(restored_order['restoredInfo']['site']['value'],
                         'hpo-site-monroeville')
        self.assertEqual(restored_order['amendedReason'],
                         'I didnt mean to cancel')

    def test_amending_an_order(self):
        # pylint: disable=unused-variable
        self.summary_dao.insert(self.participant_summary(self.participant))
        order_json = load_biobank_order_json(self.participant.participantId,
                                             filename='biobank_order_2.json')
        result = self.send_post(self.path, order_json)

        biobank_order_id = result['identifier'][1]['value']
        path = self.path + '/' + biobank_order_id
        request_data = {
            "amendedReason": "Its all better",
            "amendedInfo": {
                "author": {
                    "system": "https://www.pmi-ops.org/healthpro-username",
                    "value": "*****@*****.**"
                },
                "site": {
                    "system": "https://www.pmi-ops.org/site-id",
                    "value": "hpo-site-bannerphoenix"
                }
            }
        }

        biobank_order_identifiers = {
            "created": "2018-02-21T16:25:12",
            "createdInfo": {
                "author": {
                    "system": "https://www.pmi-ops.org/healthpro-username",
                    "value": "*****@*****.**"
                },
                "site": {
                    "system": "https://www.pmi-ops.org/site-id",
                    "value": "hpo-site-clinic-phoenix"
                }
            }
        }
        get_order = self.send_get(path)
        full_order = get_order.copy()
        full_order.update(request_data)
        full_order.update(biobank_order_identifiers)

        self.assertEqual(len(full_order['samples']), 16)
        del full_order['samples'][0]

        self.send_put(path,
                      request_data=full_order,
                      headers={'If-Match': 'W/"1"'})

        get_amended_order = self.send_get(path)
        get_summary = self.summary_dao.get(self.participant.participantId)
        self.assertEqual(get_summary.biospecimenProcessedSiteId, 1)
        self.assertEqual(get_summary.biospecimenFinalizedSiteId, 2)
        self.assertEqual(get_summary.biospecimenCollectedSiteId, 1)
        self.assertEqual(get_summary.sampleOrderStatus2PST8,
                         OrderStatus.FINALIZED)
        self.assertEqual(get_summary.sampleOrderStatus1PS08,
                         OrderStatus.FINALIZED)
        self.assertEqual(get_summary.sampleOrderStatus1PST8,
                         OrderStatus.FINALIZED)
        self.assertEqual(get_summary.sampleOrderStatus1SST8,
                         OrderStatus.CREATED)
        self.assertEqual(get_summary.sampleOrderStatus2ED10,
                         OrderStatus.CREATED)
        self.assertEqual(len(get_amended_order['samples']), 15)
        self.assertEqual(get_amended_order['meta'], {'versionId': 'W/"2"'})
        self.assertEqual(get_amended_order['amendedReason'], 'Its all better')
        self.assertEqual(get_amended_order['amendedInfo']['author']['value'],
                         '*****@*****.**')
        self.assertEqual(get_amended_order['amendedInfo']['site']['value'],
                         'hpo-site-bannerphoenix')
        self.assertEqual(get_amended_order['createdInfo']['site']['value'],
                         'hpo-site-clinic-phoenix')
        self.assertEqual(get_amended_order['createdInfo']['author']['value'],
                         '*****@*****.**')
        self.assertEqual(get_amended_order['created'], "2018-02-21T16:25:12")
        self.assertEqual(get_amended_order['status'], "AMENDED")

    def test_amend_a_restored_order(self):
        self.summary_dao.insert(self.participant_summary(self.participant))
        order_json = load_biobank_order_json(self.participant.participantId,
                                             filename='biobank_order_2.json')
        result = self.send_post(self.path, order_json)
        full_order_json = load_biobank_order_json(
            self.participant.participantId, filename='biobank_order_1.json')
        _strip_fields(result)
        _strip_fields(full_order_json)

        biobank_order_id = result['identifier'][1]['value']
        path = self.path + '/' + biobank_order_id
        request_data = {
            "amendedReason": "Its all wrong",
            "cancelledInfo": {
                "author": {
                    "system": "https://www.pmi-ops.org/healthpro-username",
                    "value": "*****@*****.**"
                },
                "site": {
                    "system": "https://www.pmi-ops.org/site-id",
                    "value": "hpo-site-monroeville"
                }
            },
            "status": "cancelled"
        }
        self.send_patch(path,
                        request_data=request_data,
                        headers={'If-Match': 'W/"1"'})
        self.send_get(path)
        request_data = {
            "amendedReason": "I didnt mean to cancel",
            "restoredInfo": {
                "author": {
                    "system": "https://www.pmi-ops.org/healthpro-username",
                    "value": "*****@*****.**"
                },
                "site": {
                    "system": "https://www.pmi-ops.org/site-id",
                    "value": "hpo-site-monroeville"
                }
            },
            "status": "restored"
        }

        self.send_patch(path,
                        request_data=request_data,
                        headers={'If-Match': 'W/"2"'})

        request_data = {
            "amendedReason":
            "Its all better",
            "samples": [{
                "test": "1ED10",
                "description": "EDTA 10 mL (1)",
                "processingRequired": False,
                "collected": "2016-01-04T09:45:49Z",
                "finalized": "2016-01-04T10:55:41Z"
            }, {
                "test": "1PST8",
                "description": "Plasma Separator 8 mL",
                "collected": "2016-01-04T09:45:49Z",
                "processingRequired": True,
                "processed": "2016-01-04T10:28:50Z",
                "finalized": "2016-01-04T10:55:41Z"
            }],
            "amendedInfo": {
                "author": {
                    "system": "https://www.pmi-ops.org/healthpro-username",
                    "value": "*****@*****.**"
                },
                "site": {
                    "system": "https://www.pmi-ops.org/site-id",
                    "value": "hpo-site-monroeville"
                }
            }
        }
        get_order = self.send_get(path)
        full_order = get_order.copy()
        full_order.update(request_data)
        self.send_put(path,
                      request_data=full_order,
                      headers={'If-Match': 'W/"3"'})

        get_amended_order = self.send_get(path)
        self.assertEqual(len(get_amended_order['samples']), 2)
        self.assertEqual(get_amended_order['amendedInfo']['author']['value'],
                         '*****@*****.**')
        self.assertEqual(get_amended_order['status'], 'AMENDED')
        self.assertEqual(get_amended_order.get('restoredSiteId'), None)
        self.assertEqual(get_amended_order.get('restoredUsername'), None)
        self.assertEqual(get_amended_order.get('restoredTime'), None)
        self.assertEqual(get_amended_order['meta'], {'versionId': 'W/"4"'})

    def test_insert_and_refetch(self):
        self.summary_dao.insert(self.participant_summary(self.participant))
        self.create_and_verify_created_obj(
            self.path, load_biobank_order_json(self.participant.participantId))

    def test_insert_new_order(self):
        self.summary_dao.insert(self.participant_summary(self.participant))
        order_json = load_biobank_order_json(self.participant.participantId,
                                             filename='biobank_order_2.json')
        result = self.send_post(self.path, order_json)
        full_order_json = load_biobank_order_json(
            self.participant.participantId, filename='biobank_order_1.json')
        _strip_fields(result)
        _strip_fields(full_order_json)
        self.assertEquals(full_order_json, result)

    def test_biobank_history_on_insert(self):
        with self.bio_dao.session() as session:
            self.summary_dao.insert(self.participant_summary(self.participant))
            order_json = load_biobank_order_json(
                self.participant.participantId,
                filename='biobank_order_2.json')
            result = self.send_post(self.path, order_json)
            load_biobank_order_json(self.participant.participantId,
                                    filename='biobank_order_1.json')
            order_history = session.query(BiobankOrderHistory).first()
            identifier_history = session.query(
                BiobankOrderIdentifierHistory).first()
            sample_history = session.query(BiobankOrderedSampleHistory).first()

            self.assertEqual(result['id'], order_history.biobankOrderId)
            self.assertEqual(identifier_history.biobankOrderId, result['id'])
            self.assertEqual(sample_history.biobankOrderId, result['id'])
            self.assertEqual(result['meta']['versionId'], 'W/"1"')
            self.assertEqual(order_history.version, 1)

            # Test history on updates...
            biobank_order_id = result['identifier'][1]['value']
            path = self.path + '/' + biobank_order_id
            request_data = {
                "amendedReason": "Its all better",
                "amendedInfo": {
                    "author": {
                        "system": "https://www.pmi-ops.org/healthpro-username",
                        "value": "*****@*****.**"
                    },
                    "site": {
                        "system": "https://www.pmi-ops.org/site-id",
                        "value": "hpo-site-bannerphoenix"
                    }
                }
            }

            biobank_order_identifiers = {
                "created": "2018-02-21T16:25:12",
                "createdInfo": {
                    "author": {
                        "system": "https://www.pmi-ops.org/healthpro-username",
                        "value": "*****@*****.**"
                    },
                    "site": {
                        "system": "https://www.pmi-ops.org/site-id",
                        "value": "hpo-site-clinic-phoenix"
                    }
                }
            }
            get_order = self.send_get(path)
            full_order = get_order.copy()
            full_order.update(request_data)
            full_order.update(biobank_order_identifiers)

            self.assertEqual(len(full_order['samples']), 16)
            del full_order['samples'][0]

            self.send_put(path,
                          request_data=full_order,
                          headers={'If-Match': 'W/"1"'})

            with self.bio_dao.session() as session:
                amended_order = self.send_get(path)
                second_order_history = session.query(
                    BiobankOrderHistory).filter_by(version=2).first()
                second_order_samples = session.query(
                    BiobankOrderedSampleHistory).filter_by(version=2).first()
                second_order_identifier = session.query(
                    BiobankOrderIdentifierHistory).filter_by(version=2).first()
                self.assertEqual(second_order_history.biobankOrderId,
                                 amended_order['id'])
                self.assertEqual(second_order_identifier.biobankOrderId,
                                 amended_order['id'])
                self.assertEqual(second_order_samples.biobankOrderId,
                                 amended_order['id'])

                # Check that original order hasn't changed in history
                original = session.query(BiobankOrderHistory).filter_by(
                    version=1).first()
                self.assertEqual(original.asdict(), order_history.asdict())

    def test_error_no_summary(self):
        order_json = load_biobank_order_json(self.participant.participantId)
        self.send_post(self.path,
                       order_json,
                       expected_status=httplib.BAD_REQUEST)

    def test_error_missing_required_fields(self):
        order_json = load_biobank_order_json(self.participant.participantId)
        del order_json['identifier']
        self.send_post(self.path,
                       order_json,
                       expected_status=httplib.BAD_REQUEST)

    def test_no_duplicate_test_within_order(self):
        order_json = load_biobank_order_json(self.participant.participantId)
        order_json['samples'].extend(list(order_json['samples']))
        self.send_post(self.path,
                       order_json,
                       expected_status=httplib.BAD_REQUEST)

    def test_auto_pair_updates_participant_and_summary(self):
        self.summary_dao.insert(self.participant_summary(self.participant))

        # Sanity check: No HPO yet.
        p_unpaired = self.participant_dao.get(self.participant.participantId)
        self.assertEquals(p_unpaired.hpoId, UNSET_HPO_ID)
        self.assertIsNone(p_unpaired.providerLink)
        s_unpaired = self.summary_dao.get(self.participant.participantId)
        self.assertEquals(s_unpaired.hpoId, UNSET_HPO_ID)

        self.send_post(self.path,
                       load_biobank_order_json(self.participant.participantId))

        # Some HPO has been set. (ParticipantDao tests cover more detailed cases / specific values.)
        p_paired = self.participant_dao.get(self.participant.participantId)
        self.assertNotEqual(p_paired.hpoId, UNSET_HPO_ID)
        self.assertIsNotNone(p_paired.providerLink)

        s_paired = self.summary_dao.get(self.participant.participantId)

        self.assertNotEqual(s_paired.hpoId, UNSET_HPO_ID)
        self.assertEqual(s_paired.biospecimenCollectedSiteId, s_paired.siteId)
        self.assertNotEqual(s_paired.biospecimenCollectedSiteId,
                            s_paired.biospecimenFinalizedSiteId)

        self.assertNotEqual(s_paired.siteId,
                            s_paired.physicalMeasurementsCreatedSiteId)
        self.assertNotEqual(s_paired.siteId,
                            s_paired.physicalMeasurementsFinalizedSiteId)

    def test_not_pairing_at_pm_when_has_bio(self):
        self.participant_id = self.create_participant()
        _id = int(self.participant_id[1:])
        self.path = ('Participant/%s/BiobankOrder' %
                     to_client_participant_id(_id))
        pid_numeric = from_client_participant_id(self.participant_id)
        self.send_consent(self.participant_id)
        self.send_post(self.path, load_biobank_order_json(pid_numeric))
        participant_paired = self.summary_dao.get(pid_numeric)

        self.assertEqual(participant_paired.siteId,
                         participant_paired.biospecimenCollectedSiteId)
        self.path = ('Participant/%s/PhysicalMeasurements' %
                     to_client_participant_id(pid_numeric))
        self._insert_measurements(datetime.datetime.utcnow().isoformat())
        self.assertNotEqual(
            participant_paired.siteId,
            participant_paired.physicalMeasurementsFinalizedSiteId)

    def test_bio_after_cancelled_pm(self):
        self.participant_id = self.create_participant()
        self.send_consent(self.participant_id)
        measurement = load_measurement_json(self.participant_id)
        measurement2 = load_measurement_json(self.participant_id)

        # send both PMs
        pm_path = 'Participant/%s/PhysicalMeasurements' % self.participant_id
        response = self.send_post(pm_path, measurement)
        self.send_post(pm_path, measurement2)

        # cancel the 1st PM
        pm_path = pm_path + '/' + response['id']
        cancel_info = get_restore_or_cancel_info()
        self.send_patch(pm_path, cancel_info)

        # set up questionnaires to hit the calculate_max_core_sample_time in participant summary
        questionnaire_id = self.create_questionnaire('questionnaire3.json')
        questionnaire_id_1 = self.create_questionnaire(
            'all_consents_questionnaire.json')
        questionnaire_id_2 = self.create_questionnaire('questionnaire4.json')
        self._submit_consent_questionnaire_response(
            self.participant_id,
            questionnaire_id_1,
            CONSENT_PERMISSION_YES_CODE,
            time=TIME_6)

        self.submit_questionnaire_response(self.participant_id,
                                           questionnaire_id,
                                           RACE_NONE_OF_THESE_CODE, None, None,
                                           datetime.date(1978, 10, 10))

        self._submit_empty_questionnaire_response(self.participant_id,
                                                  questionnaire_id_2)

        # send a biobank order
        _id = int(self.participant_id[1:])
        self.path = ('Participant/%s/BiobankOrder' %
                     to_client_participant_id(_id))
        pid_numeric = from_client_participant_id(self.participant_id)
        self.send_post(self.path, load_biobank_order_json(pid_numeric))

        # fetch participant summary
        ps = self.send_get('ParticipantSummary?participantId=%s' % _id)

        self.assertTrue(
            ps['entry'][0]['resource']["physicalMeasurementsFinalizedTime"])
        self.assertEquals(
            ps['entry'][0]['resource']["physicalMeasurementsFinalizedSite"],
            'hpo-site-bannerphoenix')
        self.assertIn('biobankId', ps['entry'][0]['resource'])

    def _insert_measurements(self, now=None):
        measurements_1 = load_measurement_json(self.participant_id, now)
        path_1 = 'Participant/%s/PhysicalMeasurements' % self.participant_id
        self.send_post(path_1, measurements_1)

    def _submit_consent_questionnaire_response(self,
                                               participant_id,
                                               questionnaire_id,
                                               ehr_consent_answer,
                                               time=TIME_1):
        code_answers = []
        _add_code_answer(code_answers, "ehrConsent", ehr_consent_answer)
        qr = make_questionnaire_response_json(participant_id,
                                              questionnaire_id,
                                              code_answers=code_answers)
        with FakeClock(time):
            self.send_post(
                'Participant/%s/QuestionnaireResponse' % participant_id, qr)

    def _submit_empty_questionnaire_response(self,
                                             participant_id,
                                             questionnaire_id,
                                             time=TIME_1):
        qr = make_questionnaire_response_json(participant_id, questionnaire_id)
        with FakeClock(time):
            self.send_post(
                'Participant/%s/QuestionnaireResponse' % participant_id, qr)
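
# The cancel/restore tests above repeat the same PATCH body. A hypothetical
# helper along these lines could build it; the function name and argument
# names are illustrative, not the project's get_restore_or_cancel_info helper.
def _sketch_order_patch_body(status, reason, author, site):
    info_key = 'cancelledInfo' if status == 'cancelled' else 'restoredInfo'
    return {
        'amendedReason': reason,
        info_key: {
            'author': {
                'system': 'https://www.pmi-ops.org/healthpro-username',
                'value': author,
            },
            'site': {
                'system': 'https://www.pmi-ops.org/site-id',
                'value': site,
            },
        },
        'status': status,
    }
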
Exemple #20
class MetricsEhrApiTestBase(FlaskTestBase):
    def setUp(self, **kwargs):
        super(MetricsEhrApiTestBase, self).setUp(use_mysql=True, **kwargs)
        self.dao = ParticipantDao()
        self.ps_dao = ParticipantSummaryDao()
        self.ehr_receipt_dao = EhrReceiptDao()
        self.ps = ParticipantSummary()
        self.calendar_dao = CalendarDao()
        self.site_dao = SiteDao()
        self.hpo_dao = HPODao()
        self.org_dao = OrganizationDao()

        self.hpo_test = self._make_hpo(hpoId=TEST_HPO_ID,
                                       name=TEST_HPO_NAME,
                                       displayName='Test',
                                       organizationType=OrganizationType.UNSET)

        self.hpo_foo = self._make_hpo(hpoId=10, name='FOO', displayName='Foo')
        self.hpo_bar = self._make_hpo(hpoId=11, name='BAR', displayName='Bar')

        self.org_foo_a = self._make_org(organizationId=10,
                                        externalId='FOO_A',
                                        displayName='Foo A',
                                        hpoId=self.hpo_foo.hpoId)
        self.org_bar_a = self._make_org(organizationId=11,
                                        externalId='BAR_A',
                                        displayName='Bar A',
                                        hpoId=self.hpo_bar.hpoId)

    def _make_hpo(self, **kwargs):
        hpo = HPO(**kwargs)
        self.hpo_dao.insert(hpo)
        return hpo

    def _make_org(self, **kwargs):
        org = Organization(**kwargs)
        self.org_dao.insert(org)
        return org

    def _make_participant(self,
                          participant,
                          first_name=None,
                          last_name=None,
                          hpo=None,
                          organization=None,
                          unconsented=False,
                          time_int=None,
                          time_study=None,
                          time_mem=None,
                          time_fp=None,
                          time_fp_stored=None,
                          gender_id=None,
                          dob=None,
                          state_id=None):
        """
    Create a participant in a transient test database.

    Note: copied from ParticipantCountsOverTimeApiTest

    :param participant: Participant object
    :param first_name: First name
    :param last_name: Last name
    :param time_int: Time that participant fulfilled INTERESTED criteria
    :param time_mem: Time that participant fulfilled MEMBER criteria
    :param time_fp: Time that participant fulfilled FULL_PARTICIPANT criteria
    :return: Participant object
    """

        participant.hpoId = hpo.hpoId
        participant.organizationId = organization.organizationId

        if unconsented is True:
            enrollment_status = None
        elif time_mem is None:
            enrollment_status = EnrollmentStatus.INTERESTED
        elif time_fp is None:
            enrollment_status = EnrollmentStatus.MEMBER
        else:
            enrollment_status = EnrollmentStatus.FULL_PARTICIPANT

        with FakeClock(time_int):
            self.dao.insert(participant)

        participant.providerLink = make_primary_provider_link_for_name(
            hpo.name)
        with FakeClock(time_mem):
            self.dao.update(participant)

        if enrollment_status is None:
            return None

        summary = self.participant_summary(participant)

        if first_name:
            summary.firstName = first_name
        if last_name:
            summary.lastName = last_name

        if gender_id:
            summary.genderIdentityId = gender_id
        if dob:
            summary.dateOfBirth = dob
        else:
            summary.dateOfBirth = datetime.date(1978, 10, 10)
        if state_id:
            summary.stateId = state_id

        summary.enrollmentStatus = enrollment_status

        summary.enrollmentStatusMemberTime = time_mem
        summary.enrollmentStatusCoreOrderedSampleTime = time_fp
        summary.enrollmentStatusCoreStoredSampleTime = time_fp_stored

        summary.hpoId = hpo.hpoId
        summary.organizationId = organization.organizationId

        if time_study is not None:
            with FakeClock(time_mem):
                summary.consentForStudyEnrollment = QuestionnaireStatus.SUBMITTED
                summary.consentForStudyEnrollmentTime = time_study

        if time_mem is not None:
            with FakeClock(time_mem):
                summary.consentForElectronicHealthRecords = QuestionnaireStatus.SUBMITTED
                summary.consentForElectronicHealthRecordsTime = time_mem

        if time_fp is not None:
            with FakeClock(time_fp):
                if not summary.consentForElectronicHealthRecords:
                    summary.consentForElectronicHealthRecords = QuestionnaireStatus.SUBMITTED
                    summary.consentForElectronicHealthRecordsTime = time_fp
                summary.questionnaireOnTheBasicsTime = time_fp
                summary.questionnaireOnLifestyleTime = time_fp
                summary.questionnaireOnOverallHealthTime = time_fp
                summary.physicalMeasurementsFinalizedTime = time_fp
                summary.physicalMeasurementsTime = time_fp
                summary.sampleOrderStatus1ED04Time = time_fp
                summary.sampleOrderStatus1SALTime = time_fp
                summary.sampleStatus1ED04Time = time_fp
                summary.sampleStatus1SALTime = time_fp
                summary.biospecimenOrderTime = time_fp
                summary.numCompletedBaselinePPIModules = REQUIRED_PPI_MODULE_COUNT

        self.ps_dao.insert(summary)

        return summary

    def _update_ehr(self, participant_summary, update_time):
        receipt = EhrReceipt(organizationId=participant_summary.organizationId,
                             receiptTime=update_time)
        self.ehr_receipt_dao.insert(receipt)
        self.ps_dao.update_ehr_status(participant_summary, update_time)
        self.ps_dao.update(participant_summary)
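

# A hedged usage sketch of the helpers above: a subclass could pair two
# participants with different awardees and record an EHR receipt for one of
# them. The class name, test name, IDs and dates are illustrative only.
class MetricsEhrApiUsageSketch(MetricsEhrApiTestBase):
    def test_two_orgs_one_ehr_receipt(self):
        summary_foo = self._make_participant(
            Participant(participantId=1, biobankId=1),
            hpo=self.hpo_foo,
            organization=self.org_foo_a,
            time_int=datetime.datetime(2018, 1, 1),
            time_mem=datetime.datetime(2018, 1, 2))
        self._make_participant(
            Participant(participantId=2, biobankId=2),
            hpo=self.hpo_bar,
            organization=self.org_bar_a,
            time_int=datetime.datetime(2018, 1, 1),
            time_mem=datetime.datetime(2018, 1, 3))
        self._update_ehr(summary_foo,
                         update_time=datetime.datetime(2018, 2, 1))
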
class DvOrderDaoTestBase(FlaskTestBase):
    def __init__(self, *args, **kwargs):
        super(DvOrderDaoTestBase, self).__init__(*args, **kwargs)

        # deliveries addressed to the participant's house
        self.post_delivery = load_test_data_json(
            'dv_order_api_post_supply_delivery.json')
        self.put_delivery = load_test_data_json(
            'dv_order_api_put_supply_delivery.json')

        # deliveries redirected to the Mayo address
        self.post_delivery_mayo = self._set_mayo_address(self.post_delivery)
        self.put_delivery_mayo = self._set_mayo_address(self.put_delivery)

        self.post_request = load_test_data_json(
            'dv_order_api_post_supply_request.json')
        self.put_request = load_test_data_json(
            'dv_order_api_put_supply_request.json')

        self.mayolink_response = {
            'orders': {
                'order': {
                    'status': 'Queued',
                    'reference_number': 'barcode',
                    'received': '2019-04-05 12:00:00',
                    'number': '12345',
                    'patient': {
                        'medical_record_number': 'WEB1ABCD1234'
                    }
                }
            }
        }

        mayolinkapi_patcher = mock.patch(
            'dao.dv_order_dao.MayoLinkApi',
            **{'return_value.post.return_value': self.mayolink_response})
        mayolinkapi_patcher.start()
        self.addCleanup(mayolinkapi_patcher.stop)

    def setUp(self):
        super(DvOrderDaoTestBase, self).setUp(use_mysql=True)

        self.dao = DvOrderDao()
        self.code_dao = CodeDao()

        self.participant_dao = ParticipantDao()
        self.summary_dao = ParticipantSummaryDao()

        self.participant = Participant(participantId=123456789, biobankId=7)

        self.participant_dao.insert(self.participant)
        self.summary = self.participant_summary(self.participant)
        self.summary_dao.insert(self.summary)

    def _set_mayo_address(self, data):
        """ set the address of a Supply Delivery json to the Mayo address """
        req = copy.deepcopy(data)

        for item in req['contained']:
            if item['resourceType'] == 'Location':
                item['address'] = {
                    'city': "Rochester",
                    'state': "MN",
                    'postalCode': "55901",
                    'line': ["3050 Superior Drive NW"],
                    'type': 'postal',
                    'use': 'work'
                }
        # Mayo tracking ID
        req['identifier'] = \
            [{"system": "http://joinallofus.org/fhir/trackingId", "value": "98765432109876543210"}]
        # Participant Tracking ID
        req['partOf'] = \
            [{'identifier': {"system": "http://joinallofus.org/fhir/trackingId", "value": "P12435464423"}}]
        return req

    def test_insert_biobank_order(self):
        payload = self.send_post('SupplyRequest',
                                 request_data=self.post_request,
                                 expected_status=httplib.CREATED)
        request_response = json.loads(payload.response[0])
        location = payload.location.rsplit('/', 1)[-1]
        self.send_put('SupplyRequest/{}'.format(location),
                      request_data=self.put_request)

        payload = self.send_post('SupplyDelivery',
                                 request_data=self.post_delivery,
                                 expected_status=httplib.CREATED)
        post_response = json.loads(payload.response[0])
        location = payload.location.rsplit('/', 1)[-1]
        put_response = self.send_put('SupplyDelivery/{}'.format(location),
                                     request_data=self.put_delivery)

        self.assertEquals(request_response['version'], 1)
        self.assertEquals(post_response['version'], 3)
        self.assertEquals(post_response['meta']['versionId'].strip('W/'),
                          '"3"')
        self.assertEquals(put_response['version'], 4)
        self.assertEquals(put_response['meta']['versionId'].strip('W/'), '"4"')
        self.assertEquals(put_response['barcode'], 'SABR90160121INA')
        self.assertEquals(put_response['order_id'], 999999)
        self.assertEquals(put_response['trackingId'], 'P12435464423999999999')

        payload = self.send_post('SupplyDelivery',
                                 request_data=self.post_delivery_mayo,
                                 expected_status=httplib.CREATED)
        post_response = json.loads(payload.response[0])

        self.assertEquals(post_response['biobankOrderId'], '12345')
        self.assertEquals(post_response['biobankStatus'], 'Queued')
        self.assertEquals(post_response['trackingId'], '98765432109876543210')

        put_response = self.send_put('SupplyDelivery/{}'.format(location),
                                     request_data=self.put_delivery_mayo)

        self.assertEquals(put_response['trackingId'], '98765432109876543210')

    def test_enumerate_shipping_status(self):
        fhir_resource = SimpleFhirR4Reader(self.post_request)
        status = self.dao._enumerate_order_shipping_status(
            fhir_resource.status)
        self.assertEquals(status, OrderShipmentStatus.SHIPPED)

    def test_enumerate_tracking_status(self):
        fhir_resource = SimpleFhirR4Reader(self.post_delivery)
        status = self.dao._enumerate_order_tracking_status(
            fhir_resource.extension.get(url=DV_FHIR_URL +
                                        'tracking-status').valueString)
        self.assertEquals(status, OrderShipmentTrackingStatus.IN_TRANSIT)

    def test_from_client_json(self):
        self.make_supply_posts(self.post_request, self.post_delivery)

        expected_result = self.build_expected_resource_type_data(
            self.post_delivery)

        result_from_dao = self.dao.from_client_json(
            self.post_delivery, participant_id=self.participant.participantId)

        # run tests against result_from_dao
        for i, test_field in enumerate(expected_result):
            self.assertEqual(
                test_field, getattr(result_from_dao,
                                    expected_result._fields[i]))

    def test_dv_order_post_inserted_correctly(self):
        def run_db_test(expected_result):
            """ Runs the db test against the expected result"""

            # return a BiobankDVOrder object from database
            with self.dao.session() as session:
                dv_order_result = session.query(BiobankDVOrder).filter_by(
                    participantId=self.participant.participantId).first()

            # run tests against dv_order_result
            for i, test_field in enumerate(expected_result):
                self.assertEqual(
                    test_field,
                    getattr(dv_order_result, expected_result._fields[i]))

        # run DB test after each post
        test_data_payloads = [self.post_request, self.post_delivery]
        for test_case in test_data_payloads:
            expected_data = self.build_expected_resource_type_data(test_case)

            # make posts to create SupplyRequest and SupplyDelivery records
            self.make_supply_posts(test_case)
            run_db_test(expected_data)

    @mock.patch('dao.dv_order_dao.MayoLinkApi')
    def test_service_unavailable(self, mocked_api):
        #pylint: disable=unused-argument
        def raises(*args):
            raise ServiceUnavailable()

        with self.assertRaises(ServiceUnavailable):
            mocked_api.return_value.post.side_effect = raises
            self.dao.send_order(self.post_delivery,
                                self.participant.participantId)
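
    # Note on the mock pattern above: assigning a callable to
    # mocked_api.return_value.post.side_effect makes every
    # MayoLinkApi().post(...) call raise ServiceUnavailable, which
    # send_order is expected to let propagate rather than swallow.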

    def build_expected_resource_type_data(self, resource_type):
        """Helper function to build the data we are expecting from the test-data file."""
        fhir_resource = SimpleFhirR4Reader(resource_type)

        test_fields = {}
        fhir_address = {}

        # fields to test with the same structure in both payloads
        fhir_device = fhir_resource.contained.get(resourceType="Device")
        test_fields.update({
            'itemName':
            fhir_device.deviceName.get(type="manufacturer-name").name,
            'orderType':
            fhir_resource.extension.get(url=DV_ORDER_URL).valueString
        })

        # add the fields to test for each resource type (SupplyRequest, SupplyDelivery)
        if resource_type == self.post_request:
            test_fields.update({
                'order_id':
                int(
                    fhir_resource.identifier.get(system=DV_FHIR_URL +
                                                 "orderId").value),
                'supplier':
                fhir_resource.contained.get(resourceType="Organization").id,
                'supplierStatus':
                fhir_resource.extension.get(
                    url=DV_FULFILLMENT_URL).valueString,
                'itemQuantity':
                fhir_resource.quantity.value,
                'itemSKUCode':
                fhir_device.identifier.get(system=DV_FHIR_URL + "SKU").value,
            })
            # Address Handling
            fhir_address = fhir_resource.contained.get(
                resourceType="Patient").address[0]

        if resource_type == self.post_delivery:
            test_fields.update({
                'order_id':
                int(fhir_resource.basedOn[0].identifier.value),
                'shipmentEstArrival':
                parse_date(
                    fhir_resource.extension.get(
                        url=DV_FHIR_URL +
                        "expected-delivery-date").valueDateTime),
                'shipmentCarrier':
                fhir_resource.extension.get(url=DV_FHIR_URL +
                                            "carrier").valueString,
                'trackingId':
                fhir_resource.identifier.get(system=DV_FHIR_URL +
                                             "trackingId").value,
                'shipmentLastUpdate':
                parse_date(fhir_resource.occurrenceDateTime),
            })
            # Address Handling
            fhir_address = fhir_resource.contained.get(
                resourceType="Location").get("address")

        address_fields = {
            "streetAddress1": fhir_address.line[0],
            "streetAddress2": '',
            "city": fhir_address.city,
            "stateId": get_code_id(fhir_address, self.code_dao, "state",
                                   "State_"),
            "zipCode": fhir_address.postalCode,
        }

        # street address 2
        if len(list(fhir_address.line)) > 1:
            address_fields['streetAddress2'] = fhir_address.line[1]

        test_fields.update(address_fields)

        Supply = namedtuple('Supply', test_fields.keys())
        expected_data = Supply(**test_fields)

        return expected_data
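
    # A minimal sketch of how the namedtuple built above is consumed by the
    # tests (field names and values here are illustrative, not taken from the
    # fixture files):
    #
    #   >>> Supply = namedtuple('Supply', ['order_id', 'city'])
    #   >>> expected = Supply(order_id=999999, city='Rochester')
    #   >>> for i, value in enumerate(expected):
    #   ...     assert value == getattr(dv_order_result, expected._fields[i])
    #
    # Iterating the tuple while indexing _fields keeps the expected values and
    # the BiobankDVOrder attribute names in lockstep.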

    def make_supply_posts(self, *test_cases):
        """Helper function to make the POSTs for tests that depend on existing dv_orders"""
        if self.post_request in test_cases:
            self.send_post(
                "SupplyRequest",
                request_data=self.post_request,
                expected_status=httplib.CREATED,
            )

        if self.post_delivery in test_cases:
            self.send_post(
                "SupplyDelivery",
                request_data=self.post_delivery,
                expected_status=httplib.CREATED,
            )
Exemple #22
class GenomicPipelineTest(CloudStorageSqlTestBase, NdbTestBase):
    def setUp(self):
        super(GenomicPipelineTest, self).setUp(use_mysql=True)
        NdbTestBase.doSetUp(self)
        TestBase.setup_fake(self)
        # Everything is stored as a list, so override bucket name as a 1-element list.
        config.override_setting(config.GENOMIC_SET_BUCKET_NAME, [_FAKE_BUCKET])
        config.override_setting(config.BIOBANK_SAMPLES_BUCKET_NAME,
                                [_FAKE_BIOBANK_SAMPLE_BUCKET])
        config.override_setting(config.GENOMIC_BIOBANK_MANIFEST_FOLDER_NAME,
                                [_FAKE_BUCKET_FOLDER])
        config.override_setting(
            config.GENOMIC_BIOBANK_MANIFEST_RESULT_FOLDER_NAME,
            [_FAKE_BUCKET_RESULT_FOLDER])
        self.participant_dao = ParticipantDao()
        self.summary_dao = ParticipantSummaryDao()
        self._participant_i = 1

    def _write_cloud_csv(self,
                         file_name,
                         contents_str,
                         bucket=None,
                         folder=None):
        bucket = _FAKE_BUCKET if bucket is None else bucket
        if folder is None:
            path = '/%s/%s' % (bucket, file_name)
        else:
            path = '/%s/%s/%s' % (bucket, folder, file_name)
        with cloudstorage_api.open(path, mode='w') as cloud_file:
            cloud_file.write(contents_str.encode('utf-8'))
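
    # A hedged usage sketch (the bucket and folder names are the fake test
    # constants defined at module level):
    #
    #   >>> self._write_cloud_csv('my-set.csv', 'a,b\n1,2\n')
    #   # -> /<_FAKE_BUCKET>/my-set.csv
    #   >>> self._write_cloud_csv('result.csv', contents,
    #   ...                       bucket=_FAKE_BIOBANK_SAMPLE_BUCKET,
    #   ...                       folder=_FAKE_BUCKET_RESULT_FOLDER)
    #   # -> /<_FAKE_BIOBANK_SAMPLE_BUCKET>/<_FAKE_BUCKET_RESULT_FOLDER>/result.csv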

    def _make_participant(self, **kwargs):
        """
    Make a participant with custom settings.
    default should create a valid participant.
    """
        i = self._participant_i
        self._participant_i += 1
        participant = Participant(participantId=i, biobankId=i, **kwargs)
        self.participant_dao.insert(participant)
        return participant

    def _make_biobank_order(self, **kwargs):
        """Makes a new BiobankOrder (same values every time) with valid/complete defaults.

    Kwargs pass through to BiobankOrder constructor, overriding defaults.
    """
        participant_id = kwargs['participantId']

        for k, default_value in (
            ('biobankOrderId', u'1'), ('created', clock.CLOCK.now()),
            ('sourceSiteId', 1), ('sourceUsername', u'*****@*****.**'),
            ('collectedSiteId', 1), ('collectedUsername', u'*****@*****.**'),
            ('processedSiteId', 1), ('processedUsername', u'*****@*****.**'),
            ('finalizedSiteId', 2), ('finalizedUsername',
                                     u'*****@*****.**'), ('version', 1),
            ('identifiers',
             [BiobankOrderIdentifier(system=u'a', value=u'c')]), ('samples', [
                 BiobankOrderedSample(test=u'1SAL2',
                                      description=u'description',
                                      processingRequired=True)
             ]), ('dvOrders',
                  [BiobankDVOrder(participantId=participant_id, version=1)])):
            if k not in kwargs:
                kwargs[k] = default_value

        biobank_order = BiobankOrderDao().insert(BiobankOrder(**kwargs))
        return biobank_order
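
    # A hedged example of overriding the defaults above; identifiers is the
    # usual override in the end-to-end tests in this class:
    #
    #   >>> self._make_biobank_order(
    #   ...     participantId=participant.participantId,
    #   ...     biobankOrderId=participant.participantId,
    #   ...     identifiers=[BiobankOrderIdentifier(
    #   ...         system=u'https://www.pmi-ops.org', value=u'12345678')])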

    def _make_summary(self, participant, **override_kwargs):
        """
    Make a summary with custom settings.
    default should create a valid summary.
    """
        valid_kwargs = dict(participantId=participant.participantId,
                            biobankId=participant.biobankId,
                            withdrawalStatus=participant.withdrawalStatus,
                            dateOfBirth=datetime.datetime(2000, 1, 1),
                            firstName='foo',
                            lastName='bar',
                            zipCode='12345',
                            sampleStatus1ED04=SampleStatus.RECEIVED,
                            sampleStatus1SAL2=SampleStatus.RECEIVED,
                            samplesToIsolateDNA=SampleStatus.RECEIVED,
                            consentForStudyEnrollmentTime=datetime.datetime(
                                2019, 1, 1))
        kwargs = dict(valid_kwargs, **override_kwargs)
        summary = self._participant_summary_with_defaults(**kwargs)
        self.summary_dao.insert(summary)
        return summary

    def test_end_to_end_valid_case(self):
        participant = self._make_participant()
        self._make_summary(participant)
        self._make_biobank_order(participantId=participant.participantId,
                                 biobankOrderId=participant.participantId,
                                 identifiers=[
                                     BiobankOrderIdentifier(
                                         system=u'https://www.pmi-ops.org',
                                         value=u'12345678')
                                 ])

        participant2 = self._make_participant()
        self._make_summary(participant2)
        self._make_biobank_order(participantId=participant2.participantId,
                                 biobankOrderId=participant2.participantId,
                                 identifiers=[
                                     BiobankOrderIdentifier(
                                         system=u'https://www.pmi-ops.org',
                                         value=u'12345679')
                                 ])

        participant3 = self._make_participant()
        self._make_summary(participant3)
        self._make_biobank_order(participantId=participant3.participantId,
                                 biobankOrderId=participant3.participantId,
                                 identifiers=[
                                     BiobankOrderIdentifier(
                                         system=u'https://www.pmi-ops.org',
                                         value=u'12345680')
                                 ])

        samples_file = test_data.open_genomic_set_file(
            'Genomic-Test-Set-test-2.csv')

        input_filename = 'Genomic-Test-Set-v1%s.csv' % self\
          ._naive_utc_to_naive_central(clock.CLOCK.now())\
          .strftime(genomic_set_file_handler.INPUT_CSV_TIME_FORMAT)

        self._write_cloud_csv(input_filename, samples_file)

        manifest_result_file = test_data.open_genomic_set_file(
            'Genomic-Manifest-Result-test.csv')

        manifest_result_filename = 'Genomic-Manifest-Result-AoU-1-v1%s.csv' % self \
          ._naive_utc_to_naive_central(clock.CLOCK.now()) \
          .strftime(genomic_set_file_handler.INPUT_CSV_TIME_FORMAT)

        self._write_cloud_csv(manifest_result_filename,
                              manifest_result_file,
                              bucket=_FAKE_BIOBANK_SAMPLE_BUCKET,
                              folder=_FAKE_BUCKET_RESULT_FOLDER)

        genomic_pipeline.process_genomic_water_line()

        # verify result file
        bucket_name = config.getSetting(config.GENOMIC_SET_BUCKET_NAME)
        path = self._find_latest_genomic_set_csv(bucket_name,
                                                 'Validation-Result')
        csv_file = cloudstorage_api.open(path)
        csv_reader = csv.DictReader(csv_file, delimiter=',')

        class ResultCsvColumns(object):
            """Names of CSV columns that we read from the genomic set upload."""
            GENOMIC_SET_NAME = 'genomic_set_name'
            GENOMIC_SET_CRITERIA = 'genomic_set_criteria'
            PID = 'pid'
            BIOBANK_ORDER_ID = 'biobank_order_id'
            NY_FLAG = 'ny_flag'
            SEX_AT_BIRTH = 'sex_at_birth'
            GENOME_TYPE = 'genome_type'
            STATUS = 'status'
            INVALID_REASON = 'invalid_reason'

            ALL = (GENOMIC_SET_NAME, GENOMIC_SET_CRITERIA, PID,
                   BIOBANK_ORDER_ID, NY_FLAG, SEX_AT_BIRTH, GENOME_TYPE,
                   STATUS, INVALID_REASON)

        missing_cols = set(ResultCsvColumns.ALL) - set(csv_reader.fieldnames)
        self.assertEqual(len(missing_cols), 0)
        rows = list(csv_reader)
        self.assertEqual(len(rows), 3)
        self.assertEqual(rows[0][ResultCsvColumns.GENOMIC_SET_NAME],
                         'name_xxx')
        self.assertEqual(rows[0][ResultCsvColumns.GENOMIC_SET_CRITERIA],
                         'criteria_xxx')
        self.assertEqual(rows[0][ResultCsvColumns.STATUS], 'valid')
        self.assertEqual(rows[0][ResultCsvColumns.INVALID_REASON], '')
        self.assertEqual(rows[0][ResultCsvColumns.PID], '1')
        self.assertEqual(rows[0][ResultCsvColumns.BIOBANK_ORDER_ID], '1')
        self.assertEqual(rows[0][ResultCsvColumns.NY_FLAG], 'Y')
        self.assertEqual(rows[0][ResultCsvColumns.GENOME_TYPE], 'aou_wgs')
        self.assertEqual(rows[0][ResultCsvColumns.SEX_AT_BIRTH], 'M')

        self.assertEqual(rows[1][ResultCsvColumns.GENOMIC_SET_NAME],
                         'name_xxx')
        self.assertEqual(rows[1][ResultCsvColumns.GENOMIC_SET_CRITERIA],
                         'criteria_xxx')
        self.assertEqual(rows[1][ResultCsvColumns.STATUS], 'valid')
        self.assertEqual(rows[1][ResultCsvColumns.INVALID_REASON], '')
        self.assertEqual(rows[1][ResultCsvColumns.PID], '2')
        self.assertEqual(rows[1][ResultCsvColumns.BIOBANK_ORDER_ID], '2')
        self.assertEqual(rows[1][ResultCsvColumns.NY_FLAG], 'N')
        self.assertEqual(rows[1][ResultCsvColumns.GENOME_TYPE], 'aou_array')
        self.assertEqual(rows[1][ResultCsvColumns.SEX_AT_BIRTH], 'F')

        self.assertEqual(rows[2][ResultCsvColumns.GENOMIC_SET_NAME],
                         'name_xxx')
        self.assertEqual(rows[2][ResultCsvColumns.GENOMIC_SET_CRITERIA],
                         'criteria_xxx')
        self.assertEqual(rows[2][ResultCsvColumns.STATUS], 'valid')
        self.assertEqual(rows[2][ResultCsvColumns.INVALID_REASON], '')
        self.assertEqual(rows[2][ResultCsvColumns.PID], '3')
        self.assertEqual(rows[2][ResultCsvColumns.BIOBANK_ORDER_ID], '3')
        self.assertEqual(rows[2][ResultCsvColumns.NY_FLAG], 'N')
        self.assertEqual(rows[2][ResultCsvColumns.GENOME_TYPE], 'aou_array')
        self.assertEqual(rows[2][ResultCsvColumns.SEX_AT_BIRTH], 'M')

        # verify manifest files
        bucket_name = config.getSetting(config.BIOBANK_SAMPLES_BUCKET_NAME)

        class ExpectedCsvColumns(object):
            VALUE = 'value'
            BIOBANK_ID = 'biobank_id'
            SEX_AT_BIRTH = 'sex_at_birth'
            GENOME_TYPE = 'genome_type'
            NY_FLAG = 'ny_flag'
            REQUEST_ID = 'request_id'
            PACKAGE_ID = 'package_id'

            ALL = (VALUE, SEX_AT_BIRTH, GENOME_TYPE, NY_FLAG, REQUEST_ID,
                   PACKAGE_ID)

        path = self._find_latest_genomic_set_csv(bucket_name, 'Manifest')
        csv_file = cloudstorage_api.open(path)
        csv_reader = csv.DictReader(csv_file, delimiter=',')

        missing_cols = set(ExpectedCsvColumns.ALL) - set(csv_reader.fieldnames)
        self.assertEqual(len(missing_cols), 0)
        rows = list(csv_reader)
        self.assertEqual(rows[0][ExpectedCsvColumns.VALUE], '12345678')
        self.assertEqual(rows[0][ExpectedCsvColumns.BIOBANK_ID], '1')
        self.assertEqual(rows[0][ExpectedCsvColumns.SEX_AT_BIRTH], 'M')
        self.assertEqual(rows[0][ExpectedCsvColumns.GENOME_TYPE], 'aou_wgs')
        self.assertEqual(rows[0][ExpectedCsvColumns.NY_FLAG], 'Y')
        self.assertEqual(rows[1][ExpectedCsvColumns.VALUE], '12345679')
        self.assertEqual(rows[1][ExpectedCsvColumns.BIOBANK_ID], '2')
        self.assertEqual(rows[1][ExpectedCsvColumns.SEX_AT_BIRTH], 'F')
        self.assertEqual(rows[1][ExpectedCsvColumns.GENOME_TYPE], 'aou_array')
        self.assertEqual(rows[1][ExpectedCsvColumns.NY_FLAG], 'N')
        self.assertEqual(rows[2][ExpectedCsvColumns.VALUE], '12345680')
        self.assertEqual(rows[2][ExpectedCsvColumns.BIOBANK_ID], '3')
        self.assertEqual(rows[2][ExpectedCsvColumns.SEX_AT_BIRTH], 'M')
        self.assertEqual(rows[2][ExpectedCsvColumns.GENOME_TYPE], 'aou_array')
        self.assertEqual(rows[2][ExpectedCsvColumns.NY_FLAG], 'N')

        # verify manifest result files
        bucket_name = config.getSetting(config.BIOBANK_SAMPLES_BUCKET_NAME)

        class ExpectedCsvColumns(object):
            VALUE = 'value'
            BIOBANK_ID = 'biobank_id'
            SEX_AT_BIRTH = 'sex_at_birth'
            GENOME_TYPE = 'genome_type'
            NY_FLAG = 'ny_flag'
            REQUEST_ID = 'request_id'
            PACKAGE_ID = 'package_id'

            ALL = (VALUE, SEX_AT_BIRTH, GENOME_TYPE, NY_FLAG, REQUEST_ID,
                   PACKAGE_ID)

        path = self._find_latest_genomic_set_csv(bucket_name,
                                                 'Manifest-Result')
        csv_file = cloudstorage_api.open(path)
        csv_reader = csv.DictReader(csv_file, delimiter=',')

        missing_cols = set(ExpectedCsvColumns.ALL) - set(csv_reader.fieldnames)
        self.assertEqual(len(missing_cols), 0)
        rows = list(csv_reader)
        self.assertEqual(rows[0][ExpectedCsvColumns.VALUE], '12345678')
        self.assertEqual(rows[0][ExpectedCsvColumns.BIOBANK_ID], '1')
        self.assertEqual(rows[0][ExpectedCsvColumns.SEX_AT_BIRTH], 'M')
        self.assertEqual(rows[0][ExpectedCsvColumns.GENOME_TYPE], 'aou_wgs')
        self.assertEqual(rows[0][ExpectedCsvColumns.NY_FLAG], 'Y')
        self.assertEqual(rows[0][ExpectedCsvColumns.PACKAGE_ID],
                         'PKG-XXXX-XXXX1')

        self.assertEqual(rows[1][ExpectedCsvColumns.VALUE], '12345679')
        self.assertEqual(rows[1][ExpectedCsvColumns.BIOBANK_ID], '2')
        self.assertEqual(rows[1][ExpectedCsvColumns.SEX_AT_BIRTH], 'F')
        self.assertEqual(rows[1][ExpectedCsvColumns.GENOME_TYPE], 'aou_array')
        self.assertEqual(rows[1][ExpectedCsvColumns.NY_FLAG], 'N')
        self.assertEqual(rows[1][ExpectedCsvColumns.PACKAGE_ID],
                         'PKG-XXXX-XXXX2')

        self.assertEqual(rows[2][ExpectedCsvColumns.VALUE], '12345680')
        self.assertEqual(rows[2][ExpectedCsvColumns.BIOBANK_ID], '3')
        self.assertEqual(rows[2][ExpectedCsvColumns.SEX_AT_BIRTH], 'M')
        self.assertEqual(rows[2][ExpectedCsvColumns.GENOME_TYPE], 'aou_array')
        self.assertEqual(rows[2][ExpectedCsvColumns.NY_FLAG], 'N')
        self.assertEqual(rows[2][ExpectedCsvColumns.PACKAGE_ID],
                         'PKG-XXXX-XXXX3')

        # verify package id in database
        member_dao = GenomicSetMemberDao()
        members = member_dao.get_all()
        for member in members:
            self.assertIn(
                member.packageId,
                ['PKG-XXXX-XXXX1', 'PKG-XXXX-XXXX2', 'PKG-XXXX-XXXX3'])

    def test_wrong_file_name_case(self):
        samples_file = test_data.open_genomic_set_file(
            'Genomic-Test-Set-test-3.csv')

        input_filename = 'Genomic-Test-Set-v1%swrong-name.csv' % self \
          ._naive_utc_to_naive_central(clock.CLOCK.now()) \
          .strftime(genomic_set_file_handler.INPUT_CSV_TIME_FORMAT)

        self._write_cloud_csv(input_filename, samples_file)

        with self.assertRaises(DataError):
            genomic_pipeline.process_genomic_water_line()

        manifest_result_file = test_data.open_genomic_set_file(
            'Genomic-Manifest-Result-test.csv')

        manifest_result_filename = 'Genomic-Manifest-Result-AoU-1-v1%swrong-name.csv' % self \
          ._naive_utc_to_naive_central(clock.CLOCK.now()) \
          .strftime(genomic_set_file_handler.INPUT_CSV_TIME_FORMAT)

        self._write_cloud_csv(manifest_result_filename,
                              manifest_result_file,
                              bucket=_FAKE_BIOBANK_SAMPLE_BUCKET,
                              folder=_FAKE_BUCKET_RESULT_FOLDER)

        with self.assertRaises(DataError):
            genomic_pipeline.process_genomic_water_line()

    def test_over_24hours_genomic_set_file_case(self):
        samples_file = test_data.open_genomic_set_file(
            'Genomic-Test-Set-test-3.csv')

        over_24hours_time = clock.CLOCK.now() - datetime.timedelta(hours=25)

        input_filename = 'Genomic-Test-Set-v1%s.csv' % self \
          ._naive_utc_to_naive_central(over_24hours_time) \
          .strftime(genomic_set_file_handler.INPUT_CSV_TIME_FORMAT)

        self._write_cloud_csv(input_filename, samples_file)

        genomic_pipeline.process_genomic_water_line()

        member_dao = GenomicSetMemberDao()
        members = member_dao.get_all()
        self.assertEqual(len(members), 0)

    def test_end_to_end_invalid_case(self):
        participant = self._make_participant()
        self._make_summary(participant, dateOfBirth='2018-02-14')
        self._make_biobank_order(participantId=participant.participantId,
                                 biobankOrderId=participant.participantId,
                                 identifiers=[
                                     BiobankOrderIdentifier(
                                         system=u'https://www.pmi-ops.org',
                                         value=u'12345678')
                                 ])

        participant2 = self._make_participant()
        self._make_summary(participant2,
                           consentForStudyEnrollmentTime=datetime.datetime(
                               1990, 1, 1))
        self._make_biobank_order(participantId=participant2.participantId,
                                 biobankOrderId=participant2.participantId,
                                 identifiers=[
                                     BiobankOrderIdentifier(
                                         system=u'https://www.pmi-ops.org',
                                         value=u'12345679')
                                 ])

        participant3 = self._make_participant()
        self._make_summary(participant3, zipCode='')
        self._make_biobank_order(participantId=participant3.participantId,
                                 biobankOrderId=participant3.participantId,
                                 identifiers=[
                                     BiobankOrderIdentifier(
                                         system=u'https://www.pmi-ops.org',
                                         value=u'12345680')
                                 ])

        participant4 = self._make_participant()
        self._make_summary(participant4)
        self._make_biobank_order(
            participantId=participant4.participantId,
            biobankOrderId=participant4.participantId,
            identifiers=[BiobankOrderIdentifier(system=u'c', value=u'e')])

        samples_file = test_data.open_genomic_set_file(
            'Genomic-Test-Set-test-3.csv')

        input_filename = 'Genomic-Test-Set-v1%s.csv' % self\
          ._naive_utc_to_naive_central(clock.CLOCK.now())\
          .strftime(genomic_set_file_handler.INPUT_CSV_TIME_FORMAT)

        self._write_cloud_csv(input_filename, samples_file)

        genomic_pipeline.process_genomic_water_line()

        # verify result file
        bucket_name = config.getSetting(config.GENOMIC_SET_BUCKET_NAME)
        path = self._find_latest_genomic_set_csv(bucket_name,
                                                 'Validation-Result')
        csv_file = cloudstorage_api.open(path)
        csv_reader = csv.DictReader(csv_file, delimiter=',')

        class ResultCsvColumns(object):
            """Names of CSV columns that we read from the genomic set upload."""
            GENOMIC_SET_NAME = 'genomic_set_name'
            GENOMIC_SET_CRITERIA = 'genomic_set_criteria'
            PID = 'pid'
            BIOBANK_ORDER_ID = 'biobank_order_id'
            NY_FLAG = 'ny_flag'
            SEX_AT_BIRTH = 'sex_at_birth'
            GENOME_TYPE = 'genome_type'
            STATUS = 'status'
            INVALID_REASON = 'invalid_reason'

            ALL = (GENOMIC_SET_NAME, GENOMIC_SET_CRITERIA, PID,
                   BIOBANK_ORDER_ID, NY_FLAG, SEX_AT_BIRTH, GENOME_TYPE,
                   STATUS, INVALID_REASON)

        missing_cols = set(ResultCsvColumns.ALL) - set(csv_reader.fieldnames)
        self.assertEqual(len(missing_cols), 0)
        rows = list(csv_reader)
        self.assertEqual(len(rows), 4)
        self.assertEqual(rows[0][ResultCsvColumns.GENOMIC_SET_NAME],
                         'name_xxx')
        self.assertEqual(rows[0][ResultCsvColumns.GENOMIC_SET_CRITERIA],
                         'criteria_xxx')
        self.assertEqual(rows[0][ResultCsvColumns.STATUS], 'invalid')
        self.assertEqual(rows[0][ResultCsvColumns.INVALID_REASON],
                         'INVALID_AGE')
        self.assertEqual(rows[0][ResultCsvColumns.PID], '1')
        self.assertEqual(rows[0][ResultCsvColumns.BIOBANK_ORDER_ID], '1')
        self.assertEqual(rows[0][ResultCsvColumns.NY_FLAG], 'Y')
        self.assertEqual(rows[0][ResultCsvColumns.GENOME_TYPE], 'aou_wgs')
        self.assertEqual(rows[0][ResultCsvColumns.SEX_AT_BIRTH], 'M')

        self.assertEqual(rows[1][ResultCsvColumns.GENOMIC_SET_NAME],
                         'name_xxx')
        self.assertEqual(rows[1][ResultCsvColumns.GENOMIC_SET_CRITERIA],
                         'criteria_xxx')
        self.assertEqual(rows[1][ResultCsvColumns.STATUS], 'invalid')
        self.assertEqual(rows[1][ResultCsvColumns.INVALID_REASON],
                         'INVALID_CONSENT')
        self.assertEqual(rows[1][ResultCsvColumns.PID], '2')
        self.assertEqual(rows[1][ResultCsvColumns.BIOBANK_ORDER_ID], '2')
        self.assertEqual(rows[1][ResultCsvColumns.NY_FLAG], 'N')
        self.assertEqual(rows[1][ResultCsvColumns.GENOME_TYPE], 'aou_array')
        self.assertEqual(rows[1][ResultCsvColumns.SEX_AT_BIRTH], 'F')

        self.assertEqual(rows[2][ResultCsvColumns.GENOMIC_SET_NAME],
                         'name_xxx')
        self.assertEqual(rows[2][ResultCsvColumns.GENOMIC_SET_CRITERIA],
                         'criteria_xxx')
        self.assertEqual(rows[2][ResultCsvColumns.STATUS], 'invalid')
        self.assertEqual(rows[2][ResultCsvColumns.INVALID_REASON],
                         'INVALID_NY_ZIPCODE')
        self.assertEqual(rows[2][ResultCsvColumns.PID], '3')
        self.assertEqual(rows[2][ResultCsvColumns.BIOBANK_ORDER_ID], '3')
        self.assertEqual(rows[2][ResultCsvColumns.NY_FLAG], 'N')
        self.assertEqual(rows[2][ResultCsvColumns.GENOME_TYPE], 'aou_array')
        self.assertEqual(rows[2][ResultCsvColumns.SEX_AT_BIRTH], 'M')

        self.assertEqual(rows[3][ResultCsvColumns.GENOMIC_SET_NAME],
                         'name_xxx')
        self.assertEqual(rows[3][ResultCsvColumns.GENOMIC_SET_CRITERIA],
                         'criteria_xxx')
        self.assertEqual(rows[3][ResultCsvColumns.STATUS], 'invalid')
        self.assertEqual(rows[3][ResultCsvColumns.INVALID_REASON],
                         'INVALID_BIOBANK_ORDER_CLIENT_ID')
        self.assertEqual(rows[3][ResultCsvColumns.PID], '4')
        self.assertEqual(rows[3][ResultCsvColumns.BIOBANK_ORDER_ID], '4')
        self.assertEqual(rows[3][ResultCsvColumns.NY_FLAG], 'Y')
        self.assertEqual(rows[3][ResultCsvColumns.GENOME_TYPE], 'aou_wgs')
        self.assertEqual(rows[3][ResultCsvColumns.SEX_AT_BIRTH], 'F')

    def _create_fake_genomic_set(self, genomic_set_name, genomic_set_criteria,
                                 genomic_set_filename):
        now = clock.CLOCK.now()
        genomic_set = GenomicSet()
        genomic_set.genomicSetName = genomic_set_name
        genomic_set.genomicSetCriteria = genomic_set_criteria
        genomic_set.genomicSetFile = genomic_set_filename
        genomic_set.genomicSetFileTime = now
        genomic_set.genomicSetStatus = GenomicSetStatus.INVALID

        set_dao = GenomicSetDao()
        genomic_set.genomicSetVersion = set_dao.get_new_version_number(
            genomic_set.genomicSetName)

        set_dao.insert(genomic_set)

        return genomic_set

    def _create_fake_genomic_member(
            self,
            genomic_set_id,
            participant_id,
            biobank_order_id,
            validation_status=GenomicValidationStatus.VALID,
            sex_at_birth='F',
            genome_type='aou_array',
            ny_flag='Y'):
        genomic_set_member = GenomicSetMember()
        genomic_set_member.genomicSetId = genomic_set_id
        genomic_set_member.validationStatus = validation_status
        genomic_set_member.participantId = participant_id
        genomic_set_member.sexAtBirth = sex_at_birth
        genomic_set_member.genomeType = genome_type
        genomic_set_member.nyFlag = 1 if ny_flag == 'Y' else 0
        genomic_set_member.biobankOrderId = biobank_order_id

        member_dao = GenomicSetMemberDao()
        member_dao.insert(genomic_set_member)

    def _naive_utc_to_naive_central(self, naive_utc_date):
        utc_date = pytz.utc.localize(naive_utc_date)
        central_date = utc_date.astimezone(pytz.timezone('US/Central'))
        return central_date.replace(tzinfo=None)
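
    # A hedged worked example of the conversion above: US/Central is UTC-6
    # (CST) at the end of December, so a naive UTC timestamp shifts back six
    # hours and the tzinfo is dropped again:
    #
    #   >>> self._naive_utc_to_naive_central(datetime.datetime(2017, 12, 31, 18, 0))
    #   datetime.datetime(2017, 12, 31, 12, 0)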

    def _find_latest_genomic_set_csv(self, cloud_bucket_name, keyword=None):
        bucket_stat_list = cloudstorage_api.listbucket('/' + cloud_bucket_name)
        if not bucket_stat_list:
            raise RuntimeError('No files in cloud bucket %r.' %
                               cloud_bucket_name)
        bucket_stat_list = [
            s for s in bucket_stat_list if s.filename.lower().endswith('.csv')
        ]
        if not bucket_stat_list:
            raise RuntimeError('No CSVs in cloud bucket %r (all files: %s).' %
                               (cloud_bucket_name, bucket_stat_list))
        if keyword:
            bucket_stat_keyword_list = []
            for item in bucket_stat_list:
                if keyword in item.filename:
                    bucket_stat_keyword_list.append(item)
            if bucket_stat_keyword_list:
                bucket_stat_keyword_list.sort(key=lambda s: s.st_ctime)
                return bucket_stat_keyword_list[-1].filename
            else:
                raise RuntimeError(
                    'No CSVs in cloud bucket %r with keyword %s (all files: %s).'
                    % (cloud_bucket_name, keyword, bucket_stat_list))
        bucket_stat_list.sort(key=lambda s: s.st_ctime)
        return bucket_stat_list[-1].filename
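
    # A hedged usage sketch: the keyword is a plain substring match on the
    # cloud filenames and the newest match by st_ctime wins, e.g.
    #
    #   >>> path = self._find_latest_genomic_set_csv(bucket_name, 'Validation-Result')
    #   >>> csv_reader = csv.DictReader(cloudstorage_api.open(path), delimiter=',')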
Exemple #23
class SiteDaoTest(SqlTestBase):
    def setUp(self):
        super(SiteDaoTest, self).setUp()
        self.site_dao = SiteDao()
        self.participant_dao = ParticipantDao()
        self.ps_dao = ParticipantSummaryDao()
        self.ps_history = ParticipantHistoryDao()

    def test_get_no_sites(self):
        self.assertIsNone(self.site_dao.get(9999))
        self.assertIsNone(
            self.site_dao.get_by_google_group('*****@*****.**'))

    def test_insert(self):
        site = Site(siteName='site',
                    googleGroup='*****@*****.**',
                    mayolinkClientNumber=12345,
                    hpoId=PITT_HPO_ID)
        created_site = self.site_dao.insert(site)
        new_site = self.site_dao.get(created_site.siteId)
        site.siteId = created_site.siteId
        self.assertEquals(site.asdict(), new_site.asdict())
        self.assertEquals(
            site.asdict(),
            self.site_dao.get_by_google_group(
                '*****@*****.**').asdict())

    def test_update(self):
        site = Site(siteName='site',
                    googleGroup='*****@*****.**',
                    mayolinkClientNumber=12345,
                    hpoId=PITT_HPO_ID)
        created_site = self.site_dao.insert(site)
        new_site = Site(siteId=created_site.siteId,
                        siteName='site2',
                        googleGroup='*****@*****.**',
                        mayolinkClientNumber=123456,
                        hpoId=UNSET_HPO_ID)
        self.site_dao.update(new_site)
        fetched_site = self.site_dao.get(created_site.siteId)
        self.assertEquals(new_site.asdict(), fetched_site.asdict())
        self.assertEquals(
            new_site.asdict(),
            self.site_dao.get_by_google_group(
                '*****@*****.**').asdict())
        self.assertIsNone(
            self.site_dao.get_by_google_group('*****@*****.**'))

    def test_participant_pairing_updates_on_change(self):
        TIME = datetime.datetime(2018, 1, 1)
        TIME2 = datetime.datetime(2018, 1, 2)
        provider_link = '[{"organization": {"reference": "Organization/AZ_TUCSON"}, "primary": true}]'
        site = Site(siteName='site',
                    googleGroup='*****@*****.**',
                    mayolinkClientNumber=12345,
                    hpoId=PITT_HPO_ID,
                    organizationId=PITT_ORG_ID)
        created_site = self.site_dao.insert(site)

        with FakeClock(TIME):
            p = Participant(participantId=1,
                            biobankId=2,
                            siteId=created_site.siteId)
            self.participant_dao.insert(p)
            fetch_p = self.participant_dao.get(p.participantId)
            updated_p = self.participant_dao.get(fetch_p.participantId)
            p_summary = self.ps_dao.insert(self.participant_summary(updated_p))

        with FakeClock(TIME2):
            update_site_parent = Site(siteId=created_site.siteId,
                                      siteName='site2',
                                      googleGroup='*****@*****.**',
                                      mayolinkClientNumber=123456,
                                      hpoId=AZ_HPO_ID,
                                      organizationId=AZ_ORG_ID)
            self.site_dao.update(update_site_parent)

        updated_p = self.participant_dao.get(fetch_p.participantId)
        ps = self.ps_dao.get(p_summary.participantId)
        ph = self.ps_history.get([updated_p.participantId, 1])

        self.assertEquals(update_site_parent.hpoId, updated_p.hpoId)
        self.assertEquals(update_site_parent.organizationId,
                          updated_p.organizationId)
        self.assertEquals(ps.organizationId, update_site_parent.organizationId)
        self.assertEquals(ps.hpoId, update_site_parent.hpoId)
        self.assertEquals(ps.organizationId, update_site_parent.organizationId)
        self.assertEquals(ph.organizationId, update_site_parent.organizationId)
        self.assertEquals(updated_p.providerLink, provider_link)
        self.assertEquals(ps.lastModified, TIME2)


class ParticipantCountsOverTimeApiTest(FlaskTestBase):

  def setUp(self):
    super(ParticipantCountsOverTimeApiTest, self).setUp(use_mysql=True)
    self.dao = ParticipantDao()
    self.ps_dao = ParticipantSummaryDao()
    self.ps = ParticipantSummary()
    self.calendar_dao = CalendarDao()
    self.hpo_dao = HPODao()

    # Needed by ParticipantCountsOverTimeApi
    self.hpo_dao.insert(HPO(hpoId=TEST_HPO_ID, name=TEST_HPO_NAME, displayName='Test',
                       organizationType=OrganizationType.UNSET))

    self.time1 = datetime.datetime(2017, 12, 31)
    self.time2 = datetime.datetime(2018, 1, 1)
    self.time3 = datetime.datetime(2018, 1, 2)
    self.time4 = datetime.datetime(2018, 1, 3)

    # Insert 2 weeks of dates
    curr_date = datetime.date(2017, 12, 22)
    for _ in xrange(0, 14):
      calendar_day = Calendar(day=curr_date )
      CalendarDao().insert(calendar_day)
      curr_date = curr_date + datetime.timedelta(days=1)
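
    # The loop above populates 2017-12-22 through 2018-01-04, which spans the
    # 2017-12-30 to 2018-01-04 window used by the valid queries below.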

  def _insert(self, participant, first_name=None, last_name=None, hpo_name=None,
              unconsented=False, time_int=None, time_mem=None, time_fp=None):
    """
    Create a participant in a transient test database.

    :param participant: Participant object
    :param first_name: First name
    :param last_name: Last name
    :param hpo_name: HPO name (one of PITT or AZ_TUCSON)
    :param time_int: Time that participant fulfilled INTERESTED criteria
    :param time_mem: Time that participant fulfilled MEMBER criteria
    :param time_fp: Time that participant fulfilled FULL_PARTICIPANT criteria
    :return: Participant object
    """

    if unconsented is True:
      enrollment_status = None
    elif time_mem is None:
      enrollment_status = EnrollmentStatus.INTERESTED
    elif time_fp is None:
      enrollment_status = EnrollmentStatus.MEMBER
    else:
      enrollment_status = EnrollmentStatus.FULL_PARTICIPANT

    with FakeClock(time_int):
      self.dao.insert(participant)

    participant.providerLink = make_primary_provider_link_for_name(hpo_name)
    with FakeClock(time_mem):
      self.dao.update(participant)

    if enrollment_status is None:
      return None

    summary = self.participant_summary(participant)

    if first_name:
      summary.firstName = first_name
    if last_name:
      summary.lastName = last_name

    summary.dateOfBirth = datetime.date(1978, 10, 10)

    summary.enrollmentStatus = enrollment_status

    summary.hpoId = self.hpo_dao.get_by_name(hpo_name).hpoId

    if time_mem is not None:
      with FakeClock(time_mem):
        summary.consentForElectronicHealthRecordsTime = time_mem

    if time_fp is not None:
      with FakeClock(time_fp):
        summary.consentForElectronicHealthRecordsTime = time_fp
        summary.questionnaireOnTheBasicsTime = time_fp
        summary.questionnaireOnLifestyleTime = time_fp
        summary.questionnaireOnOverallHealthTime = time_fp
        summary.physicalMeasurementsFinalizedTime = time_fp
        summary.sampleOrderStatus1ED04Time = time_fp
        summary.sampleOrderStatus1SALTime = time_fp

    self.ps_dao.insert(summary)

    return summary
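
  # A hedged illustration of how the time_* keyword arguments to _insert map
  # to enrollment statuses (the time fixtures are defined in setUp):
  #
  #   >>> self._insert(p, 'Alice', 'Aardvark', 'PITT', time_int=self.time1)
  #   # -> INTERESTED as of 2017-12-31
  #   >>> self._insert(p, 'Bob', 'Builder', 'PITT', time_int=self.time1,
  #   ...              time_mem=self.time2)
  #   # -> MEMBER as of 2018-01-01
  #   >>> self._insert(p, 'Chad', 'Caterpillar', 'PITT', time_int=self.time1,
  #   ...              time_mem=self.time2, time_fp=self.time3)
  #   # -> FULL_PARTICIPANT as of 2018-01-02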

  def test_get_counts_with_default_parameters(self):
    # The most basic test in this class

    p1 = Participant(participantId=1, biobankId=4)
    self._insert(p1, 'Alice', 'Aardvark', 'PITT', time_int=self.time1)

    # TODO: remove bucketSize from these parameters in all tests
    qs = """
      bucketSize=1
      &stratification=ENROLLMENT_STATUS
      &startDate=2017-12-30
      &endDate=2018-01-04
      """

    qs = ''.join(qs.split())  # Remove all whitespace

    response = self.send_get('ParticipantCountsOverTime', query_string=qs)

    # You can debug API responses easily by uncommenting the lines below
    # print('response')
    # print(response)

    interested_count_day_1 = response[0]['metrics']['INTERESTED']
    interested_count_day_2 = response[1]['metrics']['INTERESTED']

    self.assertEquals(interested_count_day_1, 0)
    self.assertEquals(interested_count_day_2, 1)
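
  # For reference, the API response is assumed to be a list with one entry per
  # day from startDate to endDate; only the 'metrics' mapping is exercised in
  # these tests (values here are illustrative):
  #
  #   response[1] -> {..., 'metrics': {'INTERESTED': 1, 'MEMBER': 0,
  #                                    'FULL_PARTICIPANT': 0}}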

  def test_get_counts_with_awardee_filter_and_blank_enrollment_status(self):
    # Does the awardee filter work when enrollmentStatus is left blank?

    p1 = Participant(participantId=1, biobankId=4)
    self._insert(p1, 'Alice', 'Aardvark', 'PITT', time_int=self.time1)

    p1 = Participant(participantId=2, biobankId=5)
    self._insert(p1, 'Bob', 'Builder', 'AZ_TUCSON', time_int=self.time1)

    p1 = Participant(participantId=3, biobankId=6)
    self._insert(p1, 'Chad', 'Caterpillar', 'AZ_TUCSON', time_int=self.time1)

    # enrollmentStatus param left blank to test we can handle it
    qs = """
      bucketSize=1
      &stratification=ENROLLMENT_STATUS
      &startDate=2017-12-30
      &endDate=2018-01-04
      &awardee=PITT
      &enrollmentStatus=
      """

    qs = ''.join(qs.split())  # Remove all whitespace

    response = self.send_get('ParticipantCountsOverTime', query_string=qs)

    interested_count_day_1 = response[0]['metrics']['INTERESTED']
    interested_count_day_2 = response[1]['metrics']['INTERESTED']

    self.assertEquals(interested_count_day_1, 0)
    self.assertEquals(interested_count_day_2, 1)

    qs = """
      bucketSize=1
      &stratification=ENROLLMENT_STATUS
      &startDate=2017-12-30
      &endDate=2018-01-04
      &awardee=AZ_TUCSON
      """

    qs = ''.join(qs.split())  # Remove all whitespace

    response = self.send_get('ParticipantCountsOverTime', query_string=qs)

    interested_count_day_1 = response[0]['metrics']['INTERESTED']
    interested_count_day_2 = response[1]['metrics']['INTERESTED']

    self.assertEquals(interested_count_day_1, 0)
    self.assertEquals(interested_count_day_2, 2)

  def test_get_counts_with_single_awardee_filter(self):
    # Does the awardee filter work when passed a single awardee?

    p1 = Participant(participantId=1, biobankId=4)
    self._insert(p1, 'Alice', 'Aardvark', 'PITT', time_int=self.time1)

    p1 = Participant(participantId=2, biobankId=5)
    self._insert(p1, 'Bob', 'Builder', 'AZ_TUCSON', time_int=self.time1)

    p1 = Participant(participantId=3, biobankId=6)
    self._insert(p1, 'Chad', 'Caterpillar', 'AZ_TUCSON', time_int=self.time1)

    qs = """
        bucketSize=1
        &stratification=ENROLLMENT_STATUS
        &startDate=2017-12-30
        &endDate=2018-01-04
        &awardee=PITT
        """

    qs = ''.join(qs.split())  # Remove all whitespace

    response = self.send_get('ParticipantCountsOverTime', query_string=qs)

    interested_count_day_1 = response[0]['metrics']['INTERESTED']
    interested_count_day_2 = response[1]['metrics']['INTERESTED']

    self.assertEquals(interested_count_day_1, 0)
    self.assertEquals(interested_count_day_2, 1)

    qs = """
        bucketSize=1
        &stratification=ENROLLMENT_STATUS
        &startDate=2017-12-30
        &endDate=2018-01-04
        &awardee=AZ_TUCSON
        """

    qs = ''.join(qs.split())  # Remove all whitespace

    response = self.send_get('ParticipantCountsOverTime', query_string=qs)

    interested_count_day_1 = response[0]['metrics']['INTERESTED']
    interested_count_day_2 = response[1]['metrics']['INTERESTED']

    self.assertEquals(interested_count_day_1, 0)
    self.assertEquals(interested_count_day_2, 2)

  def test_get_counts_with_multiple_awardee_filters(self):
    # Does the awardee filter work when passed more than one awardee?

    p1 = Participant(participantId=1, biobankId=4)
    self._insert(p1, 'Alice', 'Aardvark', 'PITT', time_int=self.time1)

    p2 = Participant(participantId=2, biobankId=5)
    self._insert(p2, 'Bob', 'Builder', 'AZ_TUCSON', time_int=self.time1)

    p3 = Participant(participantId=3, biobankId=6)
    self._insert(p3, 'Chad', 'Caterpillar', 'AZ_TUCSON', time_int=self.time1)

    qs = """
        bucketSize=1
        &stratification=ENROLLMENT_STATUS
        &startDate=2017-12-30
        &endDate=2018-01-04
        &awardee=PITT,AZ_TUCSON
        &enrollmentStatus=
        """

    qs = ''.join(qs.split())  # Remove all whitespace

    response = self.send_get('ParticipantCountsOverTime', query_string=qs)

    interested_count_day_1 = response[0]['metrics']['INTERESTED']
    interested_count_day_2 = response[1]['metrics']['INTERESTED']

    self.assertEquals(interested_count_day_1, 0)
    self.assertEquals(interested_count_day_2, 3)

  def test_get_counts_with_enrollment_status_member_filter(self):

    p1 = Participant(participantId=1, biobankId=4)
    self._insert(p1, 'Alice', 'Aardvark', 'PITT', time_int=self.time1)

    p2 = Participant(participantId=2, biobankId=5)
    self._insert(p2, 'Bob', 'Builder', 'AZ_TUCSON', time_int=self.time1, time_mem=self.time2)

    p3 = Participant(participantId=3, biobankId=6)
    self._insert(p3, 'Chad', 'Caterpillar', 'AZ_TUCSON', time_int=self.time1, time_mem=self.time2)

    p4 = Participant(participantId=4, biobankId=7)
    self._insert(p4, 'Debra', 'Dinosaur', 'PITT', time_int=self.time1, time_mem=self.time3)

    # awardee param intentionally left blank to test we can handle it
    qs = """
      bucketSize=1
      &stratification=ENROLLMENT_STATUS
      &startDate=2017-12-30
      &endDate=2018-01-04
      &awardee=
      &enrollmentStatus=MEMBER
      """

    qs = ''.join(qs.split())  # Remove all whitespace

    response = self.send_get('ParticipantCountsOverTime', query_string=qs)

    member_count_day_1 = response[0]['metrics']['MEMBER']
    member_count_day_2 = response[1]['metrics']['MEMBER']
    member_count_day_3 = response[2]['metrics']['MEMBER']
    member_count_day_4 = response[3]['metrics']['MEMBER']
    interested_count_day_4 = response[1]['metrics']['INTERESTED']

    self.assertEquals(member_count_day_1, 0)
    self.assertEquals(member_count_day_2, 0)
    self.assertEquals(member_count_day_3, 2)
    self.assertEquals(member_count_day_4, 3)
    self.assertEquals(interested_count_day_4, 0)

    qs = """
      bucketSize=1
      &stratification=TOTAL
      &startDate=2017-12-30
      &endDate=2018-01-04
      &enrollmentStatus=MEMBER
      """

    qs = ''.join(qs.split())  # Remove all whitespace

    response = self.send_get('ParticipantCountsOverTime', query_string=qs)

    total_count_day_1 = response[0]['metrics']['TOTAL']
    total_count_day_2 = response[1]['metrics']['TOTAL']

    self.assertEquals(total_count_day_1, 0)
    self.assertEquals(total_count_day_2, 3)

  def test_get_counts_with_enrollment_status_full_participant_filter(self):

    # MEMBER @ time 1
    p1 = Participant(participantId=1, biobankId=4)
    self._insert(p1, 'Alice', 'Aardvark', 'PITT', time_int=self.time1,
                 time_mem=self.time1)

    # FULL PARTICIPANT @ time 2
    p2 = Participant(participantId=2, biobankId=5)
    self._insert(p2, 'Bob', 'Builder', 'AZ_TUCSON', time_int=self.time1,
                 time_mem=self.time1, time_fp=self.time2)

    # FULL PARTICIPANT @ time 2
    p3 = Participant(participantId=3, biobankId=6)
    self._insert(p3, 'Chad', 'Caterpillar', 'AZ_TUCSON', time_int=self.time1,
                 time_mem=self.time1, time_fp=self.time2)

    # FULL PARTICIPANT @ time 3
    p4 = Participant(participantId=4, biobankId=7)
    self._insert(p4, 'Debra', 'Dinosaur', 'PITT', time_int=self.time1,
                 time_mem=self.time1, time_fp=self.time3)

    qs = """
      bucketSize=1
      &stratification=ENROLLMENT_STATUS
      &startDate=2017-12-30
      &endDate=2018-01-04
      &enrollmentStatus=FULL_PARTICIPANT
      """

    qs = ''.join(qs.split())  # Remove all whitespace

    response = self.send_get('ParticipantCountsOverTime', query_string=qs)

    full_participant_count_day_1 = response[0]['metrics']['FULL_PARTICIPANT']
    full_participant_count_day_2 = response[1]['metrics']['FULL_PARTICIPANT']
    full_participant_count_day_3 = response[2]['metrics']['FULL_PARTICIPANT']
    full_participant_count_day_4 = response[3]['metrics']['FULL_PARTICIPANT']
    member_count_day_4 = response[4]['metrics']['MEMBER']

    self.assertEquals(full_participant_count_day_1, 0)
    self.assertEquals(full_participant_count_day_2, 0)
    self.assertEquals(full_participant_count_day_3, 2)
    self.assertEquals(full_participant_count_day_4, 3)
    self.assertEquals(member_count_day_4, 0)  # Excluded per enrollmentStatus parameter

  def test_get_counts_with_total_enrollment_status_full_participant_filter(self):
    # When filtering with TOTAL stratification, filtered participants are
    # returned by their sign up date, not the date they reached their highest
    # enrollment status.

    # MEMBER @ time 1
    p1 = Participant(participantId=1, biobankId=4)
    self._insert(p1, 'Alice', 'Aardvark', 'PITT', time_int=self.time1,
                 time_mem=self.time1)

    # FULL PARTICIPANT @ time 2
    p2 = Participant(participantId=2, biobankId=5)
    self._insert(p2, 'Bob', 'Builder', 'AZ_TUCSON', time_int=self.time1,
                 time_mem=self.time1, time_fp=self.time2)

    # FULL PARTICIPANT @ time 2
    p3 = Participant(participantId=3, biobankId=6)
    self._insert(p3, 'Chad', 'Caterpillar', 'AZ_TUCSON', time_int=self.time1,
                 time_mem=self.time1, time_fp=self.time2)

    # FULL PARTICIPANT @ time 3
    p4 = Participant(participantId=4, biobankId=7)
    self._insert(p4, 'Debra', 'Dinosaur', 'PITT', time_int=self.time1,
                 time_mem=self.time1, time_fp=self.time3)

    qs = """
      bucketSize=1
      &stratification=TOTAL
      &startDate=2017-12-30
      &endDate=2018-01-04
      &enrollmentStatus=FULL_PARTICIPANT
      """

    qs = ''.join(qs.split())  # Remove all whitespace

    response = self.send_get('ParticipantCountsOverTime', query_string=qs)

    total_count_day_1 = response[0]['metrics']['TOTAL']
    total_count_day_2 = response[1]['metrics']['TOTAL']
    total_count_day_3 = response[2]['metrics']['TOTAL']
    total_count_day_4 = response[3]['metrics']['TOTAL']

    self.assertEquals(total_count_day_1, 0)
    self.assertEquals(total_count_day_2, 3)
    self.assertEquals(total_count_day_3, 3)
    self.assertEquals(total_count_day_4, 3)

  def test_get_counts_with_single_various_filters(self):
    # Do the awardee and enrollment status filters work when passed single values?

    p1 = Participant(participantId=1, biobankId=4)
    self._insert(p1, 'Alice', 'Aardvark', 'PITT', time_int=self.time1)

    p2 = Participant(participantId=2, biobankId=5)
    self._insert(p2, 'Bob', 'Builder', 'AZ_TUCSON', time_int=self.time1)

    p3 = Participant(participantId=3, biobankId=6)
    self._insert(p3, 'Chad', 'Caterpillar', 'AZ_TUCSON', time_int=self.time1,
                 time_mem=self.time1)

    p4 = Participant(participantId=4, biobankId=7)
    self._insert(p4, 'Debra', 'Dinosaur', 'PITT', time_int=self.time1,
                 time_mem=self.time1)

    qs = """
      bucketSize=1
      &stratification=ENROLLMENT_STATUS
      &startDate=2017-12-30
      &endDate=2018-01-04
      &awardee=PITT
      &enrollmentStatus=MEMBER
      """

    qs = ''.join(qs.split())  # Remove all whitespace

    response = self.send_get('ParticipantCountsOverTime', query_string=qs)

    interested_count_day_1 = response[0]['metrics']['INTERESTED']
    interested_count_day_2 = response[1]['metrics']['INTERESTED']
    member_count_day_2 = response[1]['metrics']['MEMBER']

    self.assertEquals(interested_count_day_1, 0)

    # We requested data for only MEMBERs, so no INTERESTEDs should be returned
    self.assertEquals(interested_count_day_2, 0)

    # We requested data for only MEMBERs in PITT, so no MEMBERs in AZ_TUCSON should be returned
    self.assertEquals(member_count_day_2, 1)

  def test_get_counts_with_multiple_various_filters(self):
    # Do the awardee and enrollment status filters work when passed multiple values?

    p1 = Participant(participantId=1, biobankId=4)
    self._insert(p1, 'Alice', 'Aardvark', 'PITT', time_int=self.time1)

    p2 = Participant(participantId=2, biobankId=5)
    self._insert(p2, 'Bob', 'Builder', 'AZ_TUCSON', time_int=self.time1)

    p3 = Participant(participantId=3, biobankId=6)
    self._insert(p3, 'Chad', 'Caterpillar', 'AZ_TUCSON', time_int=self.time1)

    qs = """
        bucketSize=1
        &stratification=ENROLLMENT_STATUS
        &startDate=2017-12-30
        &endDate=2018-01-04
        &awardee=AZ_TUCSON,PITT
        &enrollmentStatus=INTERESTED,MEMBER,FULL_PARTICIPANT
        """

    qs = ''.join(qs.split())  # Remove all whitespace

    response = self.send_get('ParticipantCountsOverTime', query_string=qs)

    interested_count_day_1 = response[0]['metrics']['INTERESTED']
    interested_count_day_2 = response[1]['metrics']['INTERESTED']

    self.assertEquals(interested_count_day_1, 0)
    self.assertEquals(interested_count_day_2, 3)

  def test_get_counts_with_total_stratification_unfiltered(self):

    p1 = Participant(participantId=1, biobankId=4)
    self._insert(p1, 'Alice', 'Aardvark', 'PITT', time_int=self.time1)

    p2 = Participant(participantId=2, biobankId=5)
    self._insert(p2, 'Bob', 'Builder', 'AZ_TUCSON', time_int=self.time1)

    p3 = Participant(participantId=3, biobankId=6)
    self._insert(p3, 'Chad', 'Caterpillar', 'AZ_TUCSON', time_int=self.time1)

    qs = """
      bucketSize=1
      &stratification=TOTAL
      &startDate=2017-12-30
      &endDate=2018-01-04
      """

    qs = ''.join(qs.split())  # Remove all whitespace

    response = self.send_get('ParticipantCountsOverTime', query_string=qs)

    total_count_day_1 = response[0]['metrics']['TOTAL']
    total_count_day_2 = response[1]['metrics']['TOTAL']

    self.assertEquals(total_count_day_1, 0)
    self.assertEquals(total_count_day_2, 3)

  def test_get_counts_excluding_interested_participants(self):
    # When filtering only for MEMBER, no INTERESTED participants (consented or not) should be counted

    p1 = Participant(participantId=1, biobankId=4)
    self._insert(p1, 'Alice', 'Aardvark', 'UNSET', unconsented=True, time_int=self.time1)

    p2 = Participant(participantId=2, biobankId=5)
    self._insert(p2, 'Bob', 'Builder', 'AZ_TUCSON', time_int=self.time1)

    p3 = Participant(participantId=3, biobankId=6)
    self._insert(p3, 'Chad', 'Caterpillar', 'AZ_TUCSON', time_int=self.time1, time_mem=self.time1)

    qs = """
        bucketSize=1
        &stratification=ENROLLMENT_STATUS
        &startDate=2017-12-30
        &endDate=2018-01-04
        &enrollmentStatus=MEMBER
        """

    qs = ''.join(qs.split())  # Remove all whitespace

    response = self.send_get('ParticipantCountsOverTime', query_string=qs)

    interested_count_day_2 = response[1]['metrics']['INTERESTED']
    member_count_day_2 = response[1]['metrics']['MEMBER']

    self.assertEquals(interested_count_day_2, 0)
    self.assertEquals(member_count_day_2, 1)

  def test_get_counts_excluding_withdrawn_participants(self):
    # Withdrawn participants should not appear in counts

    p1 = Participant(participantId=1, biobankId=4)
    self._insert(p1, 'Alice', 'Aardvark', 'PITT', time_int=self.time1)

    p2 = Participant(participantId=2, biobankId=5)
    self._insert(p2, 'Bob', 'Builder', 'AZ_TUCSON', time_int=self.time1)

    p3 = Participant(participantId=3, biobankId=6)
    ps3 = self._insert(p3, 'Chad', 'Caterpillar', 'AZ_TUCSON', time_int=self.time1)
    ps3.withdrawalStatus = WithdrawalStatus.NO_USE  # Chad withdrew from the study
    self.ps_dao.update(ps3)

    qs = """
        bucketSize=1
        &stratification=TOTAL
        &startDate=2017-12-30
        &endDate=2018-01-04
        """

    qs = ''.join(qs.split())  # Remove all whitespace

    response = self.send_get('ParticipantCountsOverTime', query_string=qs)

    total_count_day_1 = response[0]['metrics']['TOTAL']
    total_count_day_2 = response[1]['metrics']['TOTAL']

    self.assertEquals(total_count_day_1, 0)
    self.assertEquals(total_count_day_2, 2)

  def test_get_counts_for_unconsented_individuals(self):
    # Those who have signed up but not consented should be INTERESTED

    p1 = Participant(participantId=1, biobankId=4)
    self._insert(p1, 'Alice', 'Aardvark', 'UNSET', unconsented=True, time_int=self.time1)

    p2 = Participant(participantId=2, biobankId=5)
    self._insert(p2, 'Bob', 'Builder', 'AZ_TUCSON', time_int=self.time1)

    p3 = Participant(participantId=3, biobankId=6)
    self._insert(p3, 'Chad', 'Caterpillar', 'AZ_TUCSON', time_int=self.time1)

    qs = """
          bucketSize=1
          &stratification=ENROLLMENT_STATUS
          &startDate=2017-12-30
          &endDate=2018-01-04
          """

    qs = ''.join(qs.split())  # Remove all whitespace

    response = self.send_get('ParticipantCountsOverTime', query_string=qs)

    interested_count_day_1 = response[0]['metrics']['INTERESTED']
    interested_count_day_2 = response[1]['metrics']['INTERESTED']

    self.assertEquals(interested_count_day_1, 0)
    self.assertEquals(interested_count_day_2, 3)

  def test_url_parameter_validation_for_date_range(self):
    # Ensure requests for very long date ranges are marked BAD REQUEST

    qs = """
        bucketSize=1
        &stratification=TOTAL
        &startDate=2017-12-30
        &endDate=2217-12-30
        """
    qs = ''.join(qs.split())  # Remove all whitespace

    response = self.send_get('ParticipantCountsOverTime', query_string=qs,
                             expected_status=httplib.BAD_REQUEST)
    self.assertEquals(response, None)

  def test_url_parameter_validation_for_stratifications(self):
    # Ensure requests with invalid stratifications are marked BAD REQUEST

    qs = """
          bucketSize=1
          &stratification=FOOBAR
          &startDate=2017-12-30
          &endDate=2018-01-04
          """
    qs = ''.join(qs.split())  # Remove all whitespace

    response = self.send_get('ParticipantCountsOverTime', query_string=qs,
                             expected_status=httplib.BAD_REQUEST)
    self.assertEquals(response, None)

  def test_url_parameter_validation_for_awardee(self):
    # Ensure requests with an invalid awardee are marked BAD REQUEST

    qs = """
            bucketSize=1
            &stratification=ENROLLMENT_STATUS
            &startDate=2017-12-30
            &endDate=2018-01-04
            &awardee=FOOBAR
            """
    qs = ''.join(qs.split())  # Remove all whitespace

    response = self.send_get('ParticipantCountsOverTime', query_string=qs,
                             expected_status=httplib.BAD_REQUEST)
    self.assertEquals(response, None)

  def test_url_parameter_validation_for_enrollment_status(self):
    # Ensure requests with an invalid enrollment status are marked BAD REQUEST

    qs = """
            bucketSize=1
            &stratification=ENROLLMENT_STATUS
            &startDate=2017-12-30
            &endDate=2018-01-04
            &enrollmentStatus=FOOBAR
            """
    qs = ''.join(qs.split())  # Remove all whitespace

    response = self.send_get('ParticipantCountsOverTime', query_string=qs,
                             expected_status=httplib.BAD_REQUEST)
    self.assertEquals(response, None)
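
  # A minimal sketch (not part of the original example) of how the repeated
  # query-string construction above could be factored out; the helper name
  # _build_query_string is an assumption, not an existing RDR method.
  def _build_query_string(self, **params):
    # Join key=value pairs with '&', mirroring the manual multi-line strings
    # and the ''.join(qs.split()) cleanup used throughout these tests.
    return '&'.join('%s=%s' % (key, value) for key, value in params.items())

  # Hypothetical usage:
  #   qs = self._build_query_string(bucketSize=1, stratification='TOTAL',
  #                                 startDate='2017-12-30', endDate='2018-01-04')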
Exemple #25
class PatientStatusTestBase(FlaskTestBase):
    def setUp(self, use_mysql=True, with_data=True, with_consent_codes=False):
        super(PatientStatusTestBase, self).setUp(
            use_mysql=use_mysql, with_data=with_data,
            with_consent_codes=with_consent_codes)

        self.test_data = {
            "subject": "Patient/P123456789",
            "awardee": "PITT",
            "organization": "PITT_BANNER_HEALTH",
            "patient_status": "YES",
            "user": "******",
            "site": "hpo-site-monroeville",
            "authored": "2019-04-26T12:11:41Z",
            "comment": "This is comment"
        }

        self.dao = PatientStatusDao()
        self.participant_dao = ParticipantDao()
        self.summary_dao = ParticipantSummaryDao()

        self.participant = Participant(participantId=123456789, biobankId=7)
        self.participant_dao.insert(self.participant)
        self.summary = self.participant_summary(self.participant)
        self.summary_dao.insert(self.summary)

    def test_patient_status(self):

        data = copy.copy(self.test_data)
        model = self.dao.from_client_json(
            data, participant_id=self.participant.participantId)
        self.dao.insert(model)
        result = self.dao.get(self.participant.participantId,
                              data['organization'])

        self.assertEqual(result['subject'], data['subject'])
        self.assertEqual(result['organization'], data['organization'])
        self.assertEqual(result['site'], data['site'])
        self.assertEqual(parse(result['authored']),
                         parse(data['authored']).replace(tzinfo=None))
        self.assertEqual(result['comment'], data['comment'])

        # Test changing site
        data['authored'] = '2019-04-27T16:32:01Z'
        data['comment'] = 'saw patient at new site'
        data['site'] = 'hpo-site-bannerphoenix'
        model = self.dao.from_client_json(
            data, participant_id=self.participant.participantId)
        self.dao.update(model)
        result = self.dao.get(self.participant.participantId,
                              data['organization'])

        self.assertEqual(result['subject'], data['subject'])
        self.assertEqual(result['organization'], data['organization'])
        self.assertEqual(result['site'], data['site'])
        self.assertEqual(parse(result['authored']),
                         parse(data['authored']).replace(tzinfo=None))
        self.assertEqual(result['comment'], data['comment'])

    def test_patient_status_query(self):

        with self.dao.session() as session:
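            # Build the DAO's patient-status response query and render it to
            # SQL text to verify it compiles; the final argument (3) is
            # presumably an organization id.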
            query = self.dao._build_response_query(
                session, self.participant.participantId, 3)
            sql = self.dao.query_to_text(query)
            self.assertIsNotNone(sql)
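
# A minimal sketch (assuming dateutil is installed and imported as in the
# tests above) of the timezone normalization the assertions rely on: the
# stored 'authored' value is compared as a naive datetime, so the ISO string
# is parsed and its tzinfo dropped. The helper name is hypothetical.
from dateutil.parser import parse

def normalize_authored(value):
    # '2019-04-26T12:11:41Z' -> naive datetime(2019, 4, 26, 12, 11, 41)
    return parse(value).replace(tzinfo=None)
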
class UpdateEhrStatusUpdatesTestCase(SqlTestBase):
    def setUp(self, **kwargs):
        super(UpdateEhrStatusUpdatesTestCase, self).setUp(use_mysql=True,
                                                          **kwargs)
        self.hpo_dao = HPODao()
        self.org_dao = OrganizationDao()
        self.participant_dao = ParticipantDao()
        self.summary_dao = ParticipantSummaryDao()
        self.ehr_receipt_dao = EhrReceiptDao()

        self.hpo_foo = self._make_hpo(int_id=10, string_id='hpo_foo')
        self.hpo_bar = self._make_hpo(int_id=11, string_id='hpo_bar')

        self.org_foo_a = self._make_org(hpo=self.hpo_foo,
                                        int_id=10,
                                        external_id='FOO_A')
        self.org_foo_b = self._make_org(hpo=self.hpo_foo,
                                        int_id=11,
                                        external_id='FOO_B')
        self.org_bar_a = self._make_org(hpo=self.hpo_bar,
                                        int_id=12,
                                        external_id='BAR_A')

        self.participants = [
            self._make_participant(hpo=self.hpo_foo,
                                   org=self.org_foo_a,
                                   int_id=11),
            self._make_participant(hpo=self.hpo_foo,
                                   org=self.org_foo_b,
                                   int_id=12),
            self._make_participant(hpo=self.hpo_bar,
                                   org=self.org_bar_a,
                                   int_id=13),
            self._make_participant(hpo=self.hpo_bar,
                                   org=self.org_bar_a,
                                   int_id=14),
        ]

    def _make_hpo(self, int_id, string_id):
        hpo = HPO(hpoId=int_id, name=string_id)
        self.hpo_dao.insert(hpo)
        return hpo

    def _make_org(self, hpo, int_id, external_id):
        org = Organization(organizationId=int_id,
                           externalId=external_id,
                           displayName='SOME ORG',
                           hpoId=hpo.hpoId)
        self.org_dao.insert(org)
        return org

    def _make_participant(self, hpo, org, int_id):
        participant = self._participant_with_defaults(participantId=int_id,
                                                      biobankId=int_id)
        participant.hpoId = hpo.hpoId
        participant.organizationId = org.organizationId
        self.participant_dao.insert(participant)
        summary = self.participant_summary(participant)
        self.summary_dao.insert(summary)
        return participant, summary

    # Mock BigQuery result types
    EhrUpdatePidRow = collections.namedtuple('EhrUpdatePidRow', [
        'person_id',
    ])
    TableCountsRow = collections.namedtuple('TableCountsRow', [
        'org_id',
        'person_upload_time',
    ])

    @mock.patch('offline.update_ehr_status.update_organizations_from_job')
    @mock.patch(
        'offline.update_ehr_status.update_participant_summaries_from_job')
    @mock.patch('offline.update_ehr_status.make_update_organizations_job')
    @mock.patch(
        'offline.update_ehr_status.make_update_participant_summaries_job')
    def test_skips_when_no_job(self, mock_summary_job, mock_organization_job,
                               mock_update_summaries,
                               mock_update_organizations):
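        # Simulate the case where neither BigQuery job can be created; the
        # pipeline should then exit without calling either updater.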
        mock_summary_job.return_value = None
        mock_organization_job.return_value = None

        with FakeClock(datetime.datetime(2019, 1, 1)):
            offline.update_ehr_status.update_ehr_status()

        self.assertFalse(mock_update_summaries.called)
        self.assertFalse(mock_update_organizations.called)

    @mock.patch('offline.update_ehr_status.make_update_organizations_job')
    @mock.patch(
        'offline.update_ehr_status.make_update_participant_summaries_job')
    def test_updates_participant_summaries(self, mock_summary_job,
                                           mock_organization_job):
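        # Each iteration of the mocked job yields one page (a list) of
        # BigQuery-style rows; on the first run only participant 11 has EHR
        # data, on the second run participants 11 and 12 do.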
        mock_summary_job.return_value.__iter__.return_value = [[
            self.EhrUpdatePidRow(11),
        ]]
        mock_organization_job.return_value.__iter__.return_value = []
        with FakeClock(datetime.datetime(2019, 1, 1)):
            offline.update_ehr_status.update_ehr_status()

        mock_summary_job.return_value.__iter__.return_value = [[
            self.EhrUpdatePidRow(11),
            self.EhrUpdatePidRow(12),
        ]]
        mock_organization_job.return_value.__iter__.return_value = []
        with FakeClock(datetime.datetime(2019, 1, 2)):
            offline.update_ehr_status.update_ehr_status()

        summary = self.summary_dao.get(11)
        self.assertEqual(summary.ehrStatus, EhrStatus.PRESENT)
        self.assertEqual(summary.ehrReceiptTime, datetime.datetime(2019, 1, 1))
        self.assertEqual(summary.ehrUpdateTime, datetime.datetime(2019, 1, 2))

        summary = self.summary_dao.get(12)
        self.assertEqual(summary.ehrStatus, EhrStatus.PRESENT)
        self.assertEqual(summary.ehrReceiptTime, datetime.datetime(2019, 1, 2))
        self.assertEqual(summary.ehrUpdateTime, datetime.datetime(2019, 1, 2))

    @mock.patch('offline.update_ehr_status.make_update_organizations_job')
    @mock.patch(
        'offline.update_ehr_status.make_update_participant_summaries_job')
    def test_creates_receipts(self, mock_summary_job, mock_organization_job):
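        # First run: only organization FOO_A reports an upload (2019-01-01),
        # so exactly one receipt should be created for it and none for FOO_B.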
        mock_summary_job.return_value.__iter__.return_value = []
        mock_organization_job.return_value.__iter__.return_value = [
            [
                self.TableCountsRow(org_id='FOO_A',
                                    person_upload_time=datetime.datetime(
                                        2019, 1, 1).replace(tzinfo=pytz.UTC)),
            ],
        ]
        with FakeClock(datetime.datetime(2019, 1, 1)):
            offline.update_ehr_status.update_ehr_status()

        foo_a_receipts = self.ehr_receipt_dao.get_by_organization_id(
            self.org_foo_a.organizationId)
        self.assertEqual(len(foo_a_receipts), 1)
        self.assertEqual(foo_a_receipts[0].receiptTime,
                         datetime.datetime(2019, 1, 1))

        foo_b_receipts = self.ehr_receipt_dao.get_by_organization_id(
            self.org_foo_b.organizationId)
        self.assertEqual(len(foo_b_receipts), 0)

        mock_summary_job.return_value.__iter__.return_value = []
        mock_organization_job.return_value.__iter__.return_value = [
            [
                self.TableCountsRow(org_id='FOO_A',
                                    person_upload_time=datetime.datetime(
                                        2019, 1, 1).replace(tzinfo=pytz.UTC)),
                self.TableCountsRow(org_id='FOO_A',
                                    person_upload_time=datetime.datetime(
                                        2019, 1, 2).replace(tzinfo=pytz.UTC)),
                self.TableCountsRow(org_id='FOO_B',
                                    person_upload_time=datetime.datetime(
                                        2019, 1, 2).replace(tzinfo=pytz.UTC)),
            ],
        ]
        with FakeClock(datetime.datetime(2019, 1, 2)):
            offline.update_ehr_status.update_ehr_status()

        foo_a_receipts = self.ehr_receipt_dao.get_by_organization_id(
            self.org_foo_a.organizationId)
        self.assertEqual(len(foo_a_receipts), 2)
        self.assertEqual(foo_a_receipts[0].receiptTime,
                         datetime.datetime(2019, 1, 1))
        self.assertEqual(foo_a_receipts[1].receiptTime,
                         datetime.datetime(2019, 1, 2))

        foo_b_receipts = self.ehr_receipt_dao.get_by_organization_id(
            self.org_foo_b.organizationId)
        self.assertEqual(len(foo_b_receipts), 1)
        self.assertEqual(foo_b_receipts[0].receiptTime,
                         datetime.datetime(2019, 1, 2))

    @mock.patch('offline.update_ehr_status.make_update_organizations_job')
    @mock.patch(
        'offline.update_ehr_status.make_update_participant_summaries_job')
    def test_ignores_bad_data(self, mock_summary_job, mock_organization_job):
        invalid_participant_id = -1
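        # None of these rows should produce updates or receipts: an unknown
        # participant id, an unparseable upload time, an unknown organization,
        # and a missing upload time.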
        mock_summary_job.return_value.__iter__.return_value = [[
            self.EhrUpdatePidRow(invalid_participant_id),
        ]]
        mock_organization_job.return_value.__iter__.return_value = [
            [
                self.TableCountsRow(
                    org_id='FOO_A',
                    person_upload_time="an invalid date string"),
                self.TableCountsRow(org_id='AN_ORG_THAT_DOESNT_EXIST',
                                    person_upload_time=datetime.datetime(
                                        2019, 1, 1).replace(tzinfo=pytz.UTC)),
                self.TableCountsRow(org_id='AN_ORG_THAT_DOESNT_EXIST',
                                    person_upload_time=None),
            ],
        ]
        with FakeClock(datetime.datetime(2019, 1, 1)):
            offline.update_ehr_status.update_ehr_status()

        foo_a_receipts = self.ehr_receipt_dao.get_all()
        self.assertEqual(len(foo_a_receipts), 0)