Beispiel #1
0
 def child_cases(self):
     """Map each reverse-indexed case id to its ES-serialized case."""
     from corehq.apps.api.util import case_to_es_case
     accessor = CaseAccessors(self.domain)
     children = {}
     for reverse_index in self._reverse_indices:
         children[reverse_index.case_id] = case_to_es_case(
             accessor.get_case(reverse_index.case_id))
     return children
Beispiel #2
0
 def parent_cases(self):
     """Map each index identifier to the ES-serialized referenced (parent) case."""
     from corehq.apps.api.util import case_to_es_case
     accessor = CaseAccessors(self.domain)
     parents = {}
     for index in self.indices:
         parents[index['identifier']] = case_to_es_case(
             accessor.get_case(index['referenced_id']))
     return parents
Beispiel #3
0
def _update_case(domain, case_id, server_modified_on, last_visit_date=None):
    """Load a case, set server_modified_on (and optionally the
    last_visit_date property), then persist it via _save_case."""
    case = CaseAccessors(domain).get_case(case_id)
    case.server_modified_on = server_modified_on
    if last_visit_date:
        formatted = last_visit_date.strftime('%Y-%m-%d')
        set_case_property_directly(case, 'last_visit_date', formatted)
    _save_case(domain, case)
Beispiel #4
0
def _delete_all_cases(domain_name):
    """Soft-delete every case in the domain, 500 ids at a time."""
    logger.info('Deleting cases...')
    accessor = CaseAccessors(domain_name)
    all_case_ids = accessor.get_case_ids_in_domain()
    progress = with_progress_bar(all_case_ids, stream=silence_during_tests())
    for id_chunk in chunked(progress, 500):
        accessor.soft_delete_cases(list(id_chunk))
    logger.info('Deleting cases complete.')
Beispiel #5
0
def recalculate_stagnant_cases():
    """Collect stagnant case ids across the ICDS datasource configs and
    republish case-saved events for them so they get reprocessed.

    Side effects: publishes a change feed event per case (without the
    post-save signal) and logs progress via celery_task_logger.
    """
    domain = 'icds-cas'
    config_ids = [
        'static-icds-cas-static-ccs_record_cases_monthly_v2',
        'static-icds-cas-static-ccs_record_cases_monthly_tableau_v2',
        'static-icds-cas-static-child_cases_monthly_v2',
    ]

    stagnant_cases = set()

    for config_id in config_ids:
        config, is_static = get_datasource_config(config_id, domain)
        adapter = get_indicator_adapter(config)
        case_ids = _find_stagnant_cases(adapter)
        celery_task_logger.info(
            "Found {} stagnant cases in config {}".format(len(case_ids), config_id)
        )
        stagnant_cases |= set(case_ids)
        celery_task_logger.info(
            # fixed typo: message previously read "stagant"
            "Total number of stagnant cases is now {}".format(len(stagnant_cases))
        )

    case_accessor = CaseAccessors(domain)
    num_stagnant_cases = len(stagnant_cases)
    current_case_num = 0
    for case_ids in chunked(stagnant_cases, 1000):
        current_case_num += len(case_ids)
        for case in case_accessor.get_cases(list(case_ids)):
            publish_case_saved(case, send_post_save_signal=False)
        celery_task_logger.info(
            "Resaved {} / {} cases".format(current_case_num, num_stagnant_cases)
        )
Beispiel #6
0
    def test_edit_submissions_simple(self):
        """Resubmitting a balance block with the same form_id should replace
        the ledger value while leaving the case's xform_ids unchanged."""
        initial_quantity = 100
        form = submit_case_blocks(
            case_blocks=get_single_balance_block(quantity=initial_quantity, **self._stock_state_key),
            domain=self.domain,
        )[0]
        self._assert_stats(1, initial_quantity, initial_quantity)

        case_accessors = CaseAccessors(self.domain)
        case = case_accessors.get_case(self.case.case_id)
        # case.actions is CaseTransaction objects on one backend and
        # CaseAction objects on the other; the AttributeError fallback
        # handles the backend that lacks is_ledger_transaction.
        try:
            self.assertTrue(any([action.is_ledger_transaction for action in case.actions]))
        except AttributeError:
            self.assertTrue('commtrack' in [action.action_type for action in case.actions])
        self.assertEqual([form.form_id], case.xform_ids[1:])

        # change the value to 50
        edit_quantity = 50
        submit_case_blocks(
            case_blocks=get_single_balance_block(quantity=edit_quantity, **self._stock_state_key),
            domain=self.domain,
            form_id=form.form_id,
        )
        case = case_accessors.get_case(self.case.case_id)

        try:
            # CaseTransaction
            self.assertTrue(any([action.is_ledger_transaction for action in case.actions]))
        except AttributeError:
            # CaseAction
            self.assertTrue('commtrack' in [action.action_type for action in case.actions])

        # stats reflect the edited quantity; xform_ids still only has the one form
        self._assert_stats(1, edit_quantity, edit_quantity)
        self.assertEqual([form.form_id], case.xform_ids[1:])
class Command(BaseCommand):
    """Copy a CSV, filling in household name/number and person name columns
    from the corresponding 'icds-cas' cases."""

    def add_arguments(self, parser):
        parser.add_argument('infile')
        parser.add_argument('outfile')

    def handle(self, infile, outfile, *args, **options):
        self.case_accessor = CaseAccessors('icds-cas')
        with open(infile, 'r', encoding='utf-8') as old, open(outfile, 'w', encoding='utf-8') as new:
            reader = csv.reader(old)
            writer = csv.writer(new)
            # copy the header row through unchanged
            writer.writerow(next(reader))
            for row in reader:
                case_id, hh_id = row[4], row[10]
                hh = None
                if hh_id:
                    person, hh = self.case_accessor.get_cases([case_id, hh_id], ordered=True)
                else:
                    person = self.case_accessor.get_case(case_id)
                if hh:
                    row[18] = hh.get_case_property('name')
                    row[19] = hh.get_case_property('hh_num')
                row[20] = person.get_case_property('name')
                writer.writerow(row)
Beispiel #8
0
def lookup_case(search_field, search_id, domain, case_type):
    """
    Attempt to find the case in CouchDB by the provided search_field and search_id.

    Returns a tuple of (case or None, LookupErrors code or None).
    """
    case_accessors = CaseAccessors(domain)

    if search_field == 'case_id':
        try:
            case = case_accessors.get_case(search_id)
        except CaseNotFound:
            return (None, LookupErrors.NotFound)
        if case.domain == domain and case.type == case_type:
            return (case, None)
        return (None, LookupErrors.NotFound)

    if search_field == EXTERNAL_ID:
        matches = case_accessors.get_cases_by_external_id(search_id, case_type=case_type)
        if not matches:
            return (None, LookupErrors.NotFound)
        if len(matches) > 1:
            return (None, LookupErrors.MultipleResults)
        return (matches[0], None)

    # unrecognized search_field
    return (None, LookupErrors.NotFound)
Beispiel #9
0
    def test_xform_ids_diff(self):
        """Removing a form id from a case's xform_ids list should surface as
        a set_mismatch diff on xform_ids after migration."""
        case_id = uuid.uuid4().hex
        submit_case_blocks(
            CaseBlock(
                case_id,
                case_type='migrate',
                create=True,
                update={'p1': 1},
            ).as_string(),
            self.domain_name
        )

        submit_case_blocks(
            CaseBlock(
                case_id,
                update={'p2': 2},
            ).as_string(),
            self.domain_name
        )
        case = CaseAccessors(self.domain_name).get_case(case_id)
        # drop the second form's id to simulate an out-of-sync xform_ids list
        removed_form_id = case.xform_ids.pop(1)
        case.save()
        self.assertEqual(1, len(self._get_case_ids()))
        self._do_migration_and_assert_flags(self.domain_name)
        self.assertEqual(1, len(self._get_case_ids()))
        # the migration should report the missing form id as a set mismatch
        self._compare_diffs([
            (u'CommCareCase', FormJsonDiff(
                diff_type=u'set_mismatch', path=[u'xform_ids', u'[*]'],
                old_value=u'', new_value=removed_form_id
            ))
        ])
Beispiel #10
0
    def test_archiving_only_form(self):
        """
        Checks that archiving the only form associated with the case archives
        the case and unarchiving unarchives it.
        """
        case_id = _post_util(create=True, p1='p1-1', p2='p2-1')
        case_accessors = CaseAccessors(REBUILD_TEST_DOMAIN)
        case = case_accessors.get_case(case_id)

        self.assertFalse(case.is_deleted)
        # the two backends record a different number of actions for the same submission
        if should_use_sql_backend(REBUILD_TEST_DOMAIN):
            self.assertEqual(1, len(case.actions))
        else:
            self.assertEqual(2, len(case.actions))
        [form_id] = case.xform_ids
        form = FormAccessors(REBUILD_TEST_DOMAIN).get_form(form_id)

        form.archive()
        case = case_accessors.get_case(case_id)

        # archiving the case's only form soft-deletes the case
        self.assertTrue(case.is_deleted)
        # should just have the 'rebuild' action
        self.assertEqual(1, len(case.actions))
        self.assertTrue(case.actions[0].is_case_rebuild)

        form.unarchive()
        case = case_accessors.get_case(case_id)
        self.assertFalse(case.is_deleted)
        # unarchiving restores the case with a trailing rebuild action
        self.assertEqual(3, len(case.actions))
        self.assertTrue(case.actions[-1].is_case_rebuild)
Beispiel #11
0
def get_parent_of_case(domain, case_id, parent_case_type):
    """Return the first open parent case of type ``parent_case_type``.

    :param case_id: a case id string, or an object exposing ``.case_id``
    :raises ENikshayCaseNotFound: if the child case does not exist or no
        open parent of the requested type is found
    """
    case_accessor = CaseAccessors(domain)
    try:
        # NOTE(review): ``basestring`` is Python 2 only — this line raises
        # NameError on Python 3; confirm the intended runtime.
        if not isinstance(case_id, basestring):
            case_id = case_id.case_id

        child_case = case_accessor.get_case(case_id)
    except CaseNotFound:
        raise ENikshayCaseNotFound(
            "Couldn't find case: {}".format(case_id)
        )

    # ids of indexed cases whose referenced type matches the requested parent type
    parent_case_ids = [
        indexed_case.referenced_id for indexed_case in child_case.indices
        if indexed_case.referenced_type == parent_case_type
    ]
    parent_cases = case_accessor.get_cases(parent_case_ids)
    open_parent_cases = [
        occurrence_case for occurrence_case in parent_cases
        if not occurrence_case.closed
    ]

    if not open_parent_cases:
        raise ENikshayCaseNotFound(
            "Couldn't find any open {} cases for id: {}".format(parent_case_type, case_id)
        )

    return open_parent_cases[0]
Beispiel #12
0
def _get_direct_dependencies(domain, cases):
    """Return the extension and indexed case ids that directly depend on ``cases``."""
    accessor = CaseAccessors(domain)
    extension_ids = set(accessor.get_extension_case_ids(cases))
    indexed_ids = set(accessor.get_indexed_case_ids(cases))
    return DirectDependencies(
        all=extension_ids | indexed_ids,
        indexed_cases=indexed_ids,
        extension_cases=extension_ids,
    )
Beispiel #13
0
 def case_types(self):
     """Set of all case types for the domain, minus the CallCenter case type."""
     domain_case_types = CaseAccessors(self.domain).get_case_types()
     # remove() (not discard) — mirrors the original behavior of raising
     # if the call-center type is absent
     domain_case_types.remove(self.cc_case_type)
     return domain_case_types
Beispiel #14
0
def get_case_by_identifier(domain, identifier):
    """Find a case by any allowed identifier type via ES, falling back to
    treating ``identifier`` as a case id. Returns the case or None."""
    # circular import
    from corehq.apps.api.es import CaseES
    case_es = CaseES(domain)
    case_accessors = CaseAccessors(domain)

    def _lookup_in_es(id_type):
        query = case_es.base_query(
            terms={id_type: identifier},
            fields=['_id', id_type],
            size=1,
        )
        hits = case_es.run_query(query)['hits']['hits']
        if hits:
            return case_accessors.get_case(hits[0]['_id'])
        return None

    # Try by any of the allowed identifiers
    for id_type in ALLOWED_CASE_IDENTIFIER_TYPES:
        match = _lookup_in_es(id_type)
        if match is not None:
            return match

    # Try by case id
    try:
        case_by_id = case_accessors.get_case(identifier)
    except (CaseNotFound, KeyError):
        return None
    if case_by_id.domain == domain:
        return case_by_id
    return None
Beispiel #15
0
    def test_update_adherence_confidence(self):
        """update_adherence_confidence_level should change only the adherence
        cases whose dates fall inside the given window."""
        self.create_case_structure()
        case_accessor = CaseAccessors(self.domain)
        adherence_dates = [
            datetime(2005, 7, 10),  # outside the update window below
            datetime(2016, 8, 10),
            datetime(2016, 8, 11),
        ]
        adherence_cases = self.create_adherence_cases(adherence_dates)

        update_adherence_confidence_level(
            self.domain,
            self.person_id,
            datetime(2016, 8, 10, tzinfo=pytz.UTC),
            datetime(2016, 8, 11, tzinfo=pytz.UTC),
            "new_confidence_level",
        )
        # adherence case ids are the formatted adherence dates (see get_cases below)
        adherence_case_ids = [adherence_date.strftime("%Y-%m-%d") for adherence_date in adherence_dates]
        adherence_cases = {case.case_id: case for case in case_accessor.get_cases(adherence_case_ids)}

        # the out-of-window case keeps its original confidence level
        self.assertEqual(
            adherence_cases[adherence_case_ids[0]].dynamic_case_properties()['adherence_confidence'],
            'medium',
        )
        self.assertEqual(
            adherence_cases[adherence_case_ids[1]].dynamic_case_properties()['adherence_confidence'],
            'new_confidence_level',
        )
        self.assertEqual(
            adherence_cases[adherence_case_ids[2]].dynamic_case_properties()['adherence_confidence'],
            'new_confidence_level',
        )
Beispiel #16
0
def get_cleanliness_flag_from_scratch(domain, owner_id):
    """Recompute the cleanliness flag for ``owner_id`` by walking the owner's
    full case footprint.

    Returns CleanlinessFlag(is_clean, hint_case_id_or_None); the hint is a
    case id demonstrating why the owner is dirty, when one exists.
    """
    casedb = CaseAccessors(domain)
    footprint_info = get_case_footprint_info(domain, owner_id)
    owned_cases = footprint_info.base_ids
    # cases reachable from the owner's cases but not directly owned
    cases_to_check = footprint_info.all_ids - owned_cases
    if cases_to_check:
        closed_owned_case_ids = set(casedb.get_closed_case_ids_for_owner(owner_id))
        cases_to_check = cases_to_check - closed_owned_case_ids - footprint_info.extension_ids
        # check extension cases that are unowned or owned by others
        extension_cases_to_check = footprint_info.extension_ids - closed_owned_case_ids - owned_cases
        while extension_cases_to_check:
            extension_case = extension_cases_to_check.pop()
            dependent_cases = set(get_dependent_case_info(domain, [extension_case]).all_ids)
            unowned_dependent_cases = dependent_cases - owned_cases
            # everything reachable from this extension is covered by this pass
            extension_cases_to_check = extension_cases_to_check - dependent_cases
            dependent_cases_owned_by_other_owners = {
                dependent_case.case_id
                for dependent_case in casedb.get_cases(list(unowned_dependent_cases))
                if dependent_case.owner_id != UNOWNED_EXTENSION_OWNER_ID
            }
            if dependent_cases_owned_by_other_owners:
                # NOTE(review): intersects all dependents (not just the
                # other-owner ones) with owned cases — confirm intended
                hint_id = dependent_cases & owned_cases
                # can't get back from extension case to owned case e.g. host is a child of owned case
                if hint_id:
                    return CleanlinessFlag(False, hint_id.pop())

        if cases_to_check:
            # it wasn't in any of the open or closed IDs - it must be dirty
            reverse_index_infos = casedb.get_all_reverse_indices_info(list(cases_to_check))
            reverse_index_ids = set([r.case_id for r in reverse_index_infos])
            indexed_with_right_owner = (reverse_index_ids & (owned_cases | closed_owned_case_ids))
            found_deleted_cases = False
            while indexed_with_right_owner:
                hint_id = indexed_with_right_owner.pop()
                infos_for_this_owner = _get_info_by_case_id(reverse_index_infos, hint_id)
                for info in infos_for_this_owner:
                    try:
                        case = CaseAccessors(domain).get_case(info.referenced_id)
                        if not case.is_deleted:
                            return CleanlinessFlag(False, hint_id)
                        else:
                            found_deleted_cases = True
                    except ResourceNotFound:
                        # the case doesn't exist - don't use it as a dirty flag
                        found_deleted_cases = True

            if found_deleted_cases:
                # if we made it all the way to the end of the loop without returning anything
                # then the owner was only flagged as dirty due to missing cases,
                # This implies the owner is still clean.
                return CleanlinessFlag(True, None)
            else:
                # I don't believe code can ever be hit, but if it is we should fail hard
                # until we can better understand it.
                raise IllegalCaseId('Owner {} in domain {} has an invalid index reference chain!!'.format(
                    owner_id, domain
                ))

    return CleanlinessFlag(True, None)
Beispiel #17
0
def set_parent_case(domain, child_case, parent_case):
    """Attach ``parent_case`` as the parent of ``child_case`` while keeping
    the child's original ``server_modified_on`` timestamp.

    Returns a freshly loaded copy of the child case.
    """
    server_modified_on = child_case.server_modified_on
    set_actual_parent_case(domain, child_case, parent_case)

    # reuse one accessor instead of constructing CaseAccessors twice
    accessor = CaseAccessors(domain)
    child_case = accessor.get_case(child_case.case_id)
    child_case.server_modified_on = server_modified_on
    _save_case(domain, child_case)
    return accessor.get_case(child_case.case_id)
Beispiel #18
0
def _get_direct_dependencies(domain, case_ids):
    """Return extension/indexed case ids that directly depend on ``case_ids``."""
    # guard against a single id being passed where an iterable is expected
    assert not isinstance(case_ids, (six.text_type, bytes))
    accessor = CaseAccessors(domain)
    extension_ids = set(accessor.get_extension_case_ids(case_ids))
    indexed_ids = set(accessor.get_indexed_case_ids(case_ids))
    return DirectDependencies(
        all=indexed_ids | extension_ids,
        indexed_cases=indexed_ids,
        extension_cases=extension_ids,
    )
Beispiel #19
0
def cases_referenced_by_xform(xform):
    """
    Returns a list of CommCareCase or CommCareCaseSQL given a JSON
    representation of an XFormInstance
    """
    assert xform.domain, "Form is missing 'domain'"
    from corehq.form_processor.interfaces.dbaccessors import CaseAccessors
    referenced_ids = list(get_case_ids_from_form(xform))
    return list(CaseAccessors(xform.domain).get_cases(referenced_ids))
class ExplodeLedgersTest(BaseSyncTest):
    """Tests that explode_cases copies ledgers onto exploded copies of a case
    and leaves ledger-less cases (and their copies) without ledgers."""

    def setUp(self):
        super(ExplodeLedgersTest, self).setUp()
        self.case_accessor = CaseAccessors(self.project.name)
        self.ledger_accessor = LedgerAccessors(self.project.name)
        self._create_ledgers()

    def tearDown(self):
        # remove everything created during the test before tearing down
        delete_all_ledgers()
        delete_all_cases()
        delete_all_xforms()
        super(ExplodeLedgersTest, self).tearDown()

    def _create_ledgers(self):
        """Create two cases and post two ledger balances against 'case1'."""
        case_type = 'case'

        case1 = CaseStructure(
            case_id='case1',
            attrs={'create': True, 'case_type': case_type},
        )
        case2 = CaseStructure(
            case_id='case2',
            attrs={'create': True, 'case_type': case_type},
        )  # case2 will have no ledgers
        self.ledgers = {
            'icecream': Balance(
                entity_id=case1.case_id,
                date=datetime(2017, 11, 21, 0, 0, 0, 0),
                section_id='test',
                entry=Entry(id='icecream', quantity=4),
            ),
            'blondie': Balance(
                entity_id=case1.case_id,
                date=datetime(2017, 11, 21, 0, 0, 0, 0),
                section_id='test',
                entry=Entry(id='blondie', quantity=5),
            )
        }
        self.device.post_changes([case1, case2])
        self.device.post_changes(list(self.ledgers.values()))

    def test_explode_ledgers(self):
        """case1's copies get matching ledgers; case2 and its copies get none."""
        explode_cases(self.project.name, self.user_id, 5)
        cases = self.case_accessor.iter_cases(self.case_accessor.get_case_ids_in_domain())
        for case in cases:
            ledger_values = {l.entry_id: l for l in self.ledger_accessor.get_ledger_values_for_case(case.case_id)}

            if case.case_id == 'case2' or case.get_case_property('cc_exploded_from') == 'case2':
                self.assertEqual(len(ledger_values), 0)
            else:
                # every exploded copy should carry both ledgers with the same balances
                self.assertEqual(len(ledger_values), len(self.ledgers))
                for id, balance in six.iteritems(self.ledgers):
                    self.assertEqual(ledger_values[id].balance, balance.entry.quantity)
                    self.assertEqual(ledger_values[id].entry_id, balance.entry.id)
def _update_case(domain, case_id, server_modified_on, last_visit_date=None):
    """Set server_modified_on (and optionally the last_visit_date property)
    on a case, saving via the backend without clobbering the timestamp."""
    case = CaseAccessors(domain).get_case(case_id)
    case.server_modified_on = server_modified_on
    if last_visit_date:
        formatted = last_visit_date.strftime('%Y-%m-%d')
        set_case_property_directly(case, 'last_visit_date', formatted)
    if not should_use_sql_backend(domain):
        # can't call case.save() since it overrides the server_modified_on property
        CommCareCase.get_db().save_doc(case.to_json())
    else:
        CaseAccessorSQL.save_case(case)
class Command(BaseCommand):
    """Bulk-update orphaned open cases under closed 'household' cases in a
    single db shard, logging successful form ids and failed case ids."""

    def add_arguments(self, parser):
        parser.add_argument(
            'shard',
            help="db shard to query against",
        )
        parser.add_argument(
            'log_file',
            help="File path for log file",
        )

    def handle(self, shard, log_file, **options):
        """Process each chunk of orphan cases via bulk_update_cases, writing
        successful form ids (then failed case ids) to ``log_file``."""
        self.domain = 'icds-cas'
        self.db = shard
        self.case_accessor = CaseAccessors(self.domain)
        failed_updates = []
        with open(log_file, "w", encoding='utf-8') as fh:
            fh.write('--------Successful Form Ids----------\n')
            chunk_num = 1
            for orphan_case_chunk in self._get_cases():
                print('Currently on chunk {}'.format(chunk_num))
                # (case_id, {}, True) tuples — presumably (id, updates, close
                # flag) per bulk_update_cases' contract; confirm at call site
                case_tupes = [(case_id, {}, True) for case_id in orphan_case_chunk]
                try:
                    xform, cases = bulk_update_cases(
                        self.domain, case_tupes, self.__module__)
                    fh.write(xform.form_id + '\n')
                except LocalSubmissionError as e:
                    print('submission error')
                    print(six.text_type(e))
                    failed_updates.extend(orphan_case_chunk)
                except Exception as e:
                    # best-effort: record the failed chunk and keep going
                    print('unexpected error')
                    print(six.text_type(e))
                    failed_updates.extend(orphan_case_chunk)
                chunk_num += 1
            fh.write('--------Failed Cases--------------\n')
            for case_id in failed_updates:
                fh.write(case_id + '\n')
            fh.write('--------Logging Complete--------------\n')
            print('-------------COMPLETE--------------')

    def _get_cases(self):
        """Yield sets of open case ids that keep a 'child' index and hang off
        closed household cases (direct children plus their children)."""
        hh_cases = CommCareCaseSQL.objects.using(self.db).filter(domain=self.domain,
                                                                 type='household',
                                                                 closed=True).values_list('case_id', flat=True)
        for cases in chunked(hh_cases, 100):
            related_cases = self.case_accessor.get_reverse_indexed_cases(list(cases))
            ccs_cases = self.case_accessor.get_reverse_indexed_cases([case.case_id for case in related_cases])
            orphan_cases = {case.case_id for case in related_cases if not case.closed and
                            [c for c in case.cached_indices if c.relationship == 'child']}
            orphan_cases |= {case.case_id for case in ccs_cases if not case.closed and
                            [c for c in case.cached_indices if c.relationship == 'child']}
            yield orphan_cases
Beispiel #23
0
def get_first_claim(domain, user_id, case_id):
    """
    Returns the first claim by user_id of case_id, or None
    """
    case = CaseAccessors(domain).get_case(case_id)
    identifier = DEFAULT_CASE_INDEX_IDENTIFIERS[CASE_INDEX_EXTENSION]
    # next() with a default replaces the try/except StopIteration dance
    return next(
        (subcase for subcase in case.get_subcases(identifier)
         if subcase.type == CLAIM_CASE_TYPE and subcase.owner_id == user_id),
        None,
    )
Beispiel #24
0
 def test_sync_usercase_custom_user_data_on_create(self):
     """
     Custom user data should be synced when the user is created
     """
     self.user.user_data = {
         'completed_training': 'yes',
     }
     self.user.save()
     case = CaseAccessors(TEST_DOMAIN).get_case_by_domain_hq_user_id(self.user._id, USERCASE_TYPE)
     self.assertIsNotNone(case)
     # assertEqual: assertEquals is a deprecated alias
     self.assertEqual(case.dynamic_case_properties()['completed_training'], 'yes')
class Command(BaseCommand):
    """Bulk-update child cases related to closed 'household' cases in a
    domain, logging successful form ids and failed case ids to a file."""

    def add_arguments(self, parser):
        parser.add_argument(
            'domain',
            # fixed copy-pasted help text (previously "File path for log file")
            help="The domain to process",
        )
        parser.add_argument(
            'log_file',
            help="File path for log file",
        )

    def handle(self, domain, log_file, **options):
        total_cases = CaseES().domain(domain).case_type('household').is_closed().count()
        self.case_accessor = CaseAccessors(domain)
        failed_updates = []
        with open(log_file, "w", encoding='utf-8') as fh:
            fh.write('--------Successful Form Ids----------\n')
            for cases in chunked(with_progress_bar(self._get_cases_to_process(domain), total_cases), 100):
                related_cases = self._get_related_cases(cases)
                case_tupes = [(case_id, {}, True) for case_id in related_cases]
                try:
                    xform, cases = bulk_update_cases(
                        domain, case_tupes, self.__module__)
                    fh.write(xform.form_id + '\n')
                except LocalSubmissionError as e:
                    print('submission error')
                    print(six.text_type(e))
                    failed_updates.extend(related_cases)
                except Exception as e:
                    # best-effort: record the failed chunk and continue
                    print('unexpected error')
                    print(six.text_type(e))
                    failed_updates.extend(related_cases)
            fh.write('--------Failed Cases--------------\n')
            for case_id in failed_updates:
                # bug fix: write one case id per line (newline was missing)
                fh.write(case_id + '\n')
            print('-------------COMPLETE--------------')

    def _get_cases_to_process(self, domain):
        """Yield ids of closed 'household' cases across all partitioned dbs."""
        from corehq.sql_db.util import get_db_aliases_for_partitioned_query
        dbs = get_db_aliases_for_partitioned_query()
        for db in dbs:
            cases = CommCareCaseSQL.objects.using(db).filter(domain=domain, type='household', closed=True)
            for case in cases:
                yield case.case_id

    def _get_related_cases(self, cases):
        """Return ids of child-indexed cases of ``cases``, plus their children."""
        related_cases = {case.case_id for case in self.case_accessor.get_all_reverse_indices_info(list(cases))
                         if case.relationship == CommCareCaseIndexSQL.CHILD}
        related_cases |= {case.case_id for case in
                          self.case_accessor.get_all_reverse_indices_info(list(related_cases))
                          if case.relationship == CommCareCaseIndexSQL.CHILD}
        return related_cases
Beispiel #26
0
def cases_referenced_by_xform(xform):
    """
    Returns a list of CommCareCase or CommCareCaseSQL given a JSON
    representation of an XFormInstance
    """
    from corehq.form_processor.backends.couch.dbaccessors import CaseAccessorCouch
    from corehq.form_processor.interfaces.dbaccessors import CaseAccessors
    domain = get_and_check_xform_domain(xform)
    accessor = CaseAccessors(domain)
    if domain is None:
        # a domain-less form must resolve to the couch accessor
        assert accessor.db_accessor == CaseAccessorCouch
    referenced_ids = get_case_ids_from_form(xform)
    return list(accessor.get_cases(list(referenced_ids)))
Beispiel #27
0
def get_adherence_cases_between_dates(domain, person_case_id, start_date, end_date):
    """Return open adherence cases (indexed to the person's open episode)
    whose adherence_date falls within [start_date, end_date], compared in UTC."""
    episode = get_open_episode_case_from_person(domain, person_case_id)
    candidates = CaseAccessors(domain).get_reverse_indexed_cases([episode.case_id])

    def _pertinent(case):
        if case.closed or case.type != CASE_TYPE_ADHERENCE:
            return False
        adherence_dt = parse_datetime(
            case.dynamic_case_properties().get('adherence_date')
        ).astimezone(pytz.UTC)
        return (start_date.astimezone(pytz.UTC) <= adherence_dt
                <= end_date.astimezone(pytz.UTC))

    return [case for case in candidates if _pertinent(case)]
def _with_case(domain, case_type, last_modified):
    """Yield a freshly created case whose server_modified_on is
    ``last_modified``, deleting the case afterwards.

    NOTE(review): generator with try/finally cleanup — presumably used via
    @contextmanager at the definition or call site; confirm.
    """
    # suppress post-save signals while creating the fixture case
    with drop_connected_signals(case_post_save):
        case = CaseFactory(domain).create_case(case_type=case_type)

    _update_case(domain, case.case_id, last_modified)
    accessors = CaseAccessors(domain)
    case = accessors.get_case(case.case_id)
    try:
        yield case
    finally:
        # hard delete on the SQL backend; couch documents use case.delete()
        if should_use_sql_backend(domain):
            CaseAccessorSQL.hard_delete_cases(domain, [case.case_id])
        else:
            case.delete()
Beispiel #29
0
def hint_still_valid(domain, hint):
    """
    For a given domain/owner/cleanliness hint check if it's still valid
    """
    casedb = CaseAccessors(domain)
    try:
        hint_owner = casedb.get_case(hint).owner_id
    except CaseNotFound:
        # hint was deleted
        return False
    dependent_ids = set(get_dependent_case_info(domain, [hint]).all_ids)
    dependents = casedb.get_cases(list(dependent_ids))
    # valid iff some dependent belongs to a different (real) owner
    return any(
        c.owner_id not in (hint_owner, UNOWNED_EXTENSION_OWNER_ID)
        for c in dependents
    )
    def handle(self, *args, **options):
        """Interactively soft-delete a case and its entire dependent case
        network, writing details of the deleted cases to a CSV file.

        NOTE(review): uses ``raw_input`` and ``print`` statements — this is
        Python 2-only code.
        """
        domain = options["domain"]
        case_id = options["case_id"]
        case_accessor = CaseAccessors(domain=domain)
        case = case_accessor.get_case(case_id)
        # confirm before deleting a case that isn't already deleted
        if (
            not case.is_deleted
            and raw_input(
                "\n".join(["Case {} is not already deleted. Are you sure you want to delete it? (y/N)".format(case_id)])
            ).lower()
            != "y"
        ):
            sys.exit(0)
        dependent_case_ids = get_entire_case_network(domain, [case_id])

        cases_to_delete = filter(lambda case: not case.is_deleted, case_accessor.get_cases(dependent_case_ids))
        if cases_to_delete:
            # write an audit CSV of what will be deleted before acting
            with open(options["filename"], "w") as csvfile:
                writer = csv.writer(csvfile)
                headers = ["case id", "case type", "owner", "opened by", "app version"]
                writer.writerow(headers)
                print headers

                for case in cases_to_delete:
                    # the case's first form determines the app version info
                    form = FormAccessors(domain=domain).get_form(case.xform_ids[0])
                    app_version_info = get_app_version_info(
                        domain, form.build_id, form.form_data["@version"], form.metadata
                    )
                    row = [
                        case.case_id,
                        case.type,
                        cached_owner_id_to_display(case.owner_id) or case.owner_id,
                        cached_owner_id_to_display(case.opened_by),
                        app_version_info.build_version,
                    ]
                    writer.writerow(row)
                    print row

        # second confirmation before the actual bulk soft delete
        if (
            cases_to_delete
            and raw_input("\n".join(["Delete these {} cases? (y/N)".format(len(cases_to_delete))])).lower() == "y"
        ):
            case_accessor.soft_delete_cases([c.case_id for c in cases_to_delete])
            print "deleted {} cases".format(len(cases_to_delete))

        if cases_to_delete:
            print "details here: {}".format(options["filename"])
        else:
            print "didn't find any cases to delete"
Beispiel #31
0
class Command(BaseCommand):
    help = """
    Finds cases with duplicate IDs and marks all but one of each ID as a duplicate
    """
    # Columns written to the CSV audit log.  The DictWriter below is created
    # with extrasaction='ignore', so any other keys in a log dict are dropped.
    logfile_fields = [
        # person-case properties always logged
        'person_case_id',
        'person_name',
        'dto_name',
        'phi_name',
        'owner_id',
        'dob',
        'phone_number',
        'dataset',
        'enrolled_in_private',
        # case-specific properties that may be updated
        'case_type',
        'case_id',
        'name',
        'person_id',
        'person_id_flat',
        'person_id_deprecated',
        'person_id_flat_deprecated',
        'person_id_at_request',
        'person_id_flat_at_request',
    ]

    def add_arguments(self, parser):
        parser.add_argument('domain')
        # Dry-run by default; pass --commit to actually apply the case updates.
        parser.add_argument(
            '--commit',
            action='store_true',
            dest='commit',
            default=False,
        )

    def handle(self, domain, **options):
        # Find person cases with duplicated IDs, log every planned change to a
        # timestamped CSV, and (only when --commit was given) apply the updates.
        self.domain = domain
        self.accessor = CaseAccessors(domain)
        commit = options['commit']
        self.id_generator = ReadableIdGenerator(domain, commit)

        # Log filename: "<module name>-<timestamp>.csv" in the working directory.
        filename = '{}-{}.csv'.format(
            self.__module__.split('.')[-1],
            datetime.datetime.now().strftime('%Y-%m-%d_%H.%M.%S'))
        print("Logging actions to {}".format(filename))
        with open(filename, 'w') as f:
            logfile = csv.DictWriter(f,
                                     self.logfile_fields,
                                     extrasaction='ignore')
            logfile.writeheader()

            print("Finding duplicates")
            bad_case_stubs = get_duplicated_case_stubs(self.domain,
                                                       CASE_TYPE_PERSON)
            # Lazily fetch the full cases for the duplicated stubs.
            bad_cases = self.accessor.iter_cases(stub['case_id']
                                                 for stub in bad_case_stubs)

            print("Processing duplicate cases")
            for person_case in with_progress_bar(bad_cases,
                                                 len(bad_case_stubs)):
                # Private-sector and public-sector cases are updated by
                # different strategies; falsy entries are filtered out.
                if person_case.get_case_property(
                        'enrolled_in_private') == 'true':
                    updates = list(
                        filter(None, self.get_private_updates(person_case)))
                else:
                    updates = list(
                        filter(None, self.get_public_updates(person_case)))

                person_info = self.get_person_case_info(person_case)
                for case, update in updates:
                    # Merge person info with the update, dropping falsy values
                    # and transliterating keys/values to ASCII for the CSV.
                    log = {
                        unidecode(k): unidecode(v)
                        for d in [person_info, update] for k, v in d.items()
                        if v
                    }
                    log['case_type'] = case.type
                    log['case_id'] = case.case_id
                    logfile.writerow(log)

                if commit:
                    # (case_id, update dict, close=False) tuples for bulk update.
                    update_tuples = [(case.case_id, update, False)
                                     for case, update in updates]
                    bulk_update_cases(self.domain, update_tuples,
                                      self.__module__)
Beispiel #32
0
 def __init__(self, timing_context, restore_state, async_task=None):
     """Hold the timing/restore context plus a case accessor for the restore domain.

     :param timing_context: timing instrumentation for the restore
     :param restore_state: restore state object; its ``domain`` scopes case access
     :param async_task: optional task handle when the restore runs asynchronously
     """
     self.timing_context = timing_context
     self.async_task = async_task
     self.restore_state = restore_state
     # Case accessor is scoped to the same domain the restore is running against.
     self.case_accessor = CaseAccessors(restore_state.domain)
Beispiel #33
0
            if importer_util.is_valid_id(uploaded_owner_id, domain, id_cache):
                owner_id = uploaded_owner_id
                id_cache[uploaded_owner_id] = True
            else:
                errors.add(ImportErrors.InvalidOwnerId, i + 1, 'owner_id')
                id_cache[uploaded_owner_id] = False
                continue
        else:
            # if they didn't supply an owner_id mapping, default to current
            # user
            owner_id = user_id

        extras = {}
        if parent_id:
            try:
                parent_case = CaseAccessors(domain).get_case(parent_id)
                track_load()

                if parent_case.domain == domain:
                    extras['index'] = {
                        parent_ref: (parent_case.type, parent_id)
                    }
            except ResourceNotFound:
                errors.add(ImportErrors.InvalidParentId, i + 1, 'parent_id')
                continue
        elif parent_external_id:
            parent_case, error = importer_util.lookup_case(
                'external_id', parent_external_id, domain, parent_type)
            track_load()
            if parent_case:
                extras['index'] = {
Beispiel #34
0
class TestFormArchiving(TestCase, TestFileMixin):
    """Tests for archiving/unarchiving forms and the side effects on cases,
    form history, signals, and change-feed publishing."""
    file_path = ('data', 'sample_xforms')
    root = os.path.dirname(__file__)

    def setUp(self):
        super(TestFormArchiving, self).setUp()
        self.casedb = CaseAccessors('test-domain')
        self.formdb = FormAccessors('test-domain')

    def tearDown(self):
        FormProcessorTestUtils.delete_all_xforms()
        FormProcessorTestUtils.delete_all_cases()
        super(TestFormArchiving, self).tearDown()

    @run_with_all_backends
    def testArchive(self):
        # Archiving a form should soft-delete its case and record an
        # 'archive' history entry; unarchiving restores both.
        case_id = 'ddb8e2b3-7ce0-43e4-ad45-d7a2eebe9169'
        xml_data = self.get_xml('basic')
        response, xform, cases = submit_form_locally(
            xml_data,
            'test-domain',
        )

        self.assertTrue(xform.is_normal)
        self.assertEqual(0, len(xform.history))

        # Bracket the archive call so we can assert on the recorded timestamp.
        lower_bound = datetime.utcnow() - timedelta(seconds=1)
        xform.archive(user_id='mr. librarian')
        upper_bound = datetime.utcnow() + timedelta(seconds=1)

        xform = self.formdb.get_form(xform.form_id)
        self.assertTrue(xform.is_archived)
        case = self.casedb.get_case(case_id)
        self.assertTrue(case.is_deleted)
        self.assertEqual(case.xform_ids, [])

        [archival] = xform.history
        self.assertTrue(lower_bound <= archival.date <= upper_bound)
        self.assertEqual('archive', archival.operation)
        self.assertEqual('mr. librarian', archival.user)

        lower_bound = datetime.utcnow() - timedelta(seconds=1)
        xform.unarchive(user_id='mr. researcher')
        upper_bound = datetime.utcnow() + timedelta(seconds=1)

        xform = self.formdb.get_form(xform.form_id)
        self.assertTrue(xform.is_normal)
        case = self.casedb.get_case(case_id)
        self.assertFalse(case.is_deleted)
        self.assertEqual(case.xform_ids, [xform.form_id])

        # History now contains the archive followed by the unarchive.
        [archival, restoration] = xform.history
        self.assertTrue(lower_bound <= restoration.date <= upper_bound)
        self.assertEqual('unarchive', restoration.operation)
        self.assertEqual('mr. researcher', restoration.user)

    @run_with_all_backends
    def testSignal(self):
        # Module-level counters incremented by the signal receivers below.
        global archive_counter, restore_counter
        archive_counter = 0
        restore_counter = 0

        def count_archive(**kwargs):
            global archive_counter
            archive_counter += 1

        def count_unarchive(**kwargs):
            global restore_counter
            restore_counter += 1

        xform_archived.connect(count_archive)
        xform_unarchived.connect(count_unarchive)

        xml_data = self.get_xml('basic')
        response, xform, cases = submit_form_locally(
            xml_data,
            'test-domain',
        )

        # Plain submission fires neither signal.
        self.assertEqual(0, archive_counter)
        self.assertEqual(0, restore_counter)

        xform.archive()
        self.assertEqual(1, archive_counter)
        self.assertEqual(0, restore_counter)

        xform = self.formdb.get_form(xform.form_id)
        xform.unarchive()
        self.assertEqual(1, archive_counter)
        self.assertEqual(1, restore_counter)

    @override_settings(TESTS_SHOULD_USE_SQL_BACKEND=True)
    def testPublishChanges(self):
        # Archiving/unarchiving should publish exactly one change for the
        # form to the kafka FORM_SQL topic, even with signals disconnected.
        xml_data = self.get_xml('basic')
        response, xform, cases = submit_form_locally(
            xml_data,
            'test-domain',
        )

        with capture_kafka_changes_context(topics.FORM_SQL) as change_context:
            with drop_connected_signals(xform_archived):
                xform.archive()
        self.assertEqual(1, len(change_context.changes))
        self.assertEqual(change_context.changes[0].id, xform.form_id)

        xform = self.formdb.get_form(xform.form_id)
        with capture_kafka_changes_context(topics.FORM_SQL) as change_context:
            with drop_connected_signals(xform_unarchived):
                xform.unarchive()
        self.assertEqual(1, len(change_context.changes))
        self.assertEqual(change_context.changes[0].id, xform.form_id)
Beispiel #35
0
class EditFormTest(TestCase, TestFileMixin):
    """Tests for form editing (resubmitting with the same form_id):
    deprecation of the old form, rebuilt case state, and failure handling."""
    # Fixed form_id shared by the 'original' and 'edit' XML fixtures.
    ID = '7H46J37FGH3'
    domain = 'test-form-edits'

    file_path = ('data', 'deprecation')
    root = os.path.dirname(__file__)

    def setUp(self):
        super(EditFormTest, self).setUp()
        self.interface = FormProcessorInterface(self.domain)
        self.casedb = CaseAccessors(self.domain)
        self.formdb = FormAccessors(self.domain)

    def tearDown(self):
        FormProcessorTestUtils.delete_all_xforms(self.domain)
        FormProcessorTestUtils.delete_all_cases(self.domain)
        UnfinishedSubmissionStub.objects.all().delete()
        super(EditFormTest, self).tearDown()

    def test_basic_edit(self):
        # Submitting a second form with the same ID replaces the first and
        # keeps the original around as a deprecated form with a new form_id.
        original_xml = self.get_xml('original')
        edit_xml = self.get_xml('edit')

        xform = post_xform(original_xml, domain=self.domain)

        self.assertEqual(self.ID, xform.form_id)
        self.assertTrue(xform.is_normal)
        self.assertEqual("", xform.form_data['vitals']['height'])
        self.assertEqual("other", xform.form_data['assessment']['categories'])

        xform = post_xform(edit_xml, domain=self.domain)
        self.assertEqual(self.ID, xform.form_id)
        self.assertTrue(xform.is_normal)
        self.assertEqual("100", xform.form_data['vitals']['height'])
        self.assertEqual("Edited Baby!", xform.form_data['assessment']['categories'])

        deprecated_xform = self.formdb.get_form(xform.deprecated_form_id)

        # The deprecated copy keeps the original content under a new id.
        self.assertEqual(self.ID, deprecated_xform.orig_id)
        self.assertNotEqual(self.ID, deprecated_xform.form_id)
        self.assertTrue(deprecated_xform.is_deprecated)
        self.assertEqual("", deprecated_xform.form_data['vitals']['height'])
        self.assertEqual("other", deprecated_xform.form_data['assessment']['categories'])

        self.assertEqual(xform.received_on, deprecated_xform.received_on)
        self.assertEqual(xform.deprecated_form_id, deprecated_xform.form_id)
        self.assertTrue(xform.edited_on > deprecated_xform.received_on)

        self.assertEqual(
            deprecated_xform.get_xml(),
            original_xml
        )
        self.assertEqual(xform.get_xml(), edit_xml)

    def test_edit_an_error(self):
        # A failed (error) submission can be corrected by resubmitting
        # with the same form_id.
        form_id = uuid.uuid4().hex
        case_block = CaseBlock(
            create=True,
            case_id='',  # this should cause the submission to error
            case_type='person',
            owner_id='some-owner',
        )

        form, _ = submit_case_blocks(case_block.as_string(), domain=self.domain, form_id=form_id)
        self.assertTrue(form.is_error)
        self.assertTrue('IllegalCaseId' in form.problem)

        case_block.case_id = uuid.uuid4().hex
        form, _ = submit_case_blocks(case_block.as_string(), domain=self.domain, form_id=form_id)
        self.assertFalse(form.is_error)
        self.assertEqual(None, getattr(form, 'problem', None))

    def test_broken_save(self):
        """
        Test that if the second form submission terminates unexpectedly
        and the main form isn't saved, then there are no side effects
        such as the original having been marked as deprecated.
        """

        original_xml = self.get_xml('original')
        edit_xml = self.get_xml('edit')

        result = submit_form_locally(original_xml, self.domain)
        xform = result.xform
        self.assertEqual(self.ID, xform.form_id)
        self.assertTrue(xform.is_normal)
        self.assertEqual(self.domain, xform.domain)

        self.assertEqual(
            UnfinishedSubmissionStub.objects.filter(xform_id=self.ID).count(),
            0
        )

        # Simulate a crash during save of the edit submission.
        with patch.object(self.interface.processor, 'save_processed_models', side_effect=RequestFailed):
            with self.assertRaises(RequestFailed):
                submit_form_locally(edit_xml, self.domain)

        # it didn't go through, so make sure there are no edits still
        self.assertIsNone(getattr(xform, 'deprecated_form_id', None))

        xform = self.formdb.get_form(self.ID)
        self.assertIsNotNone(xform)
        # The failed submission leaves exactly one unsaved submission stub.
        self.assertEqual(
            UnfinishedSubmissionStub.objects.filter(xform_id=self.ID,
                                                    saved=False).count(),
            1
        )
        self.assertEqual(
            UnfinishedSubmissionStub.objects.filter(xform_id=self.ID).count(),
            1
        )

    def test_case_management(self):
        # Editing a form re-applies its case updates: the case reflects the
        # edited values but still references only the original form_id.
        form_id = uuid.uuid4().hex
        case_id = uuid.uuid4().hex
        owner_id = uuid.uuid4().hex
        case_block = CaseBlock(
            create=True,
            case_id=case_id,
            case_type='person',
            owner_id=owner_id,
            update={
                'property': 'original value'
            }
        ).as_string()
        submit_case_blocks(case_block, domain=self.domain, form_id=form_id)

        # validate some assumptions
        case = self.casedb.get_case(case_id)
        self.assertEqual(case.type, 'person')
        self.assertEqual(case.dynamic_case_properties()['property'], 'original value')
        self.assertEqual([form_id], case.xform_ids)

        if not getattr(settings, 'TESTS_SHOULD_USE_SQL_BACKEND', False):
            self.assertEqual(2, len(case.actions))
            for a in case.actions:
                self.assertEqual(form_id, a.xform_id)

        # submit a new form with a different case update
        case_block = CaseBlock(
            create=True,
            case_id=case_id,
            case_type='newtype',
            owner_id=owner_id,
            update={
                'property': 'edited value'
            }
        ).as_string()
        xform, _ = submit_case_blocks(case_block, domain=self.domain, form_id=form_id)

        case = self.casedb.get_case(case_id)
        self.assertEqual(case.type, 'newtype')
        self.assertEqual(case.dynamic_case_properties()['property'], 'edited value')
        self.assertEqual([form_id], case.xform_ids)
        self.assertEqual(case.server_modified_on, xform.edited_on)

        if not getattr(settings, 'TESTS_SHOULD_USE_SQL_BACKEND', False):
            self.assertEqual(2, len(case.actions))
            for a in case.actions:
                self.assertEqual(form_id, a.xform_id)

    def test_second_edit_fails(self):
        # An edit whose case update is invalid becomes an error form while
        # the previous version is still deprecated.
        form_id = uuid.uuid4().hex
        case_id = uuid.uuid4().hex
        case_block = CaseBlock(
            create=True,
            case_id=case_id,
            case_type='person',
        ).as_string()
        submit_case_blocks(case_block, domain=self.domain, form_id=form_id)

        # submit an edit form with a bad case update (for example a bad ID)
        case_block = CaseBlock(
            create=True,
            case_id='',
            case_type='person',
        ).as_string()
        submit_case_blocks(case_block, domain=self.domain, form_id=form_id)

        xform = self.formdb.get_form(form_id)
        self.assertTrue(xform.is_error)

        deprecated_xform = self.formdb.get_form(xform.deprecated_form_id)
        self.assertTrue(deprecated_xform.is_deprecated)

    def test_case_management_ordering(self):
        # Editing a middle form must keep its updates ordered before later
        # forms when the case is rebuilt.
        case_id = uuid.uuid4().hex
        owner_id = uuid.uuid4().hex

        # create a case
        case_block = CaseBlock(
            create=True,
            case_id=case_id,
            case_type='person',
            owner_id=owner_id,
        ).as_string()
        create_form_id = submit_case_blocks(case_block, domain=self.domain)[0].form_id

        # validate that worked
        case = self.casedb.get_case(case_id)
        self.assertEqual([create_form_id], case.xform_ids)

        if not getattr(settings, 'TESTS_SHOULD_USE_SQL_BACKEND', False):
            self.assertTrue(create_form_id in [a.xform_id for a in case.actions])
            for a in case.actions:
                self.assertEqual(create_form_id, a.xform_id)

        edit_date = datetime.utcnow()
        # set some property value
        case_block = CaseBlock(
            create=False,
            case_id=case_id,
            date_modified=edit_date,
            update={
                'property': 'first value',
            }
        ).as_string()
        edit_form_id = submit_case_blocks(case_block, domain=self.domain)[0].form_id

        # validate that worked
        case = self.casedb.get_case(case_id)
        self.assertEqual(case.dynamic_case_properties()['property'], 'first value')
        self.assertEqual([create_form_id, edit_form_id], case.xform_ids)

        if not getattr(settings, 'TESTS_SHOULD_USE_SQL_BACKEND', False):
            self.assertTrue(all(
                form_id in [a.xform_id for a in case.actions]
                for form_id in [create_form_id, edit_form_id]
            ))

        # submit a second (new) form updating the value
        case_block = CaseBlock(
            create=False,
            case_id=case_id,
            update={
                'property': 'final value',
            }
        ).as_string()
        second_edit_form_id = submit_case_blocks(case_block, domain=self.domain)[0].form_id

        # validate that worked
        case = self.casedb.get_case(case_id)
        self.assertEqual(case.dynamic_case_properties()['property'], 'final value')
        self.assertEqual([create_form_id, edit_form_id, second_edit_form_id], case.xform_ids)

        if not getattr(settings, 'TESTS_SHOULD_USE_SQL_BACKEND', False):
            self.assertTrue(all(
                form_id in [a.xform_id for a in case.actions]
                for form_id in [create_form_id, edit_form_id, second_edit_form_id]
            ))

        # deprecate the middle edit
        case_block = CaseBlock(
            create=False,
            case_id=case_id,
            date_modified=edit_date,  # need to use the previous edit date for action sort comparisons
            update={
                'property': 'edited value',
                'added_property': 'added value',
            }
        ).as_string()
        submit_case_blocks(case_block, domain=self.domain, form_id=edit_form_id)

        # ensure that the middle edit stays in the right place and is applied
        # before the final one
        case = self.casedb.get_case(case_id)
        self.assertEqual(case.dynamic_case_properties()['property'], 'final value')
        self.assertEqual(case.dynamic_case_properties()['added_property'], 'added value')
        self.assertEqual([create_form_id, edit_form_id, second_edit_form_id], case.xform_ids)

        if not getattr(settings, 'TESTS_SHOULD_USE_SQL_BACKEND', False):
            self.assertEqual(
                [create_form_id, create_form_id, edit_form_id, second_edit_form_id],
                [a.xform_id for a in case.actions]
            )
    def get(self, request, domain, case_id=None, attachment_id=None):
        """
        Serve a case attachment, optionally resized when it is an image.

        https://github.com/dimagi/commcare/wiki/CaseAttachmentAPI
        max_size	The largest size (in bytes) for the attachment
        max_image_width	The largest width in pixels for an image attachment
        max_image_height	The largest height in pixels for an image attachment

        Returns 403 for web users without attachment-view permission, 404
        when the ids are missing or the case does not exist, and otherwise a
        streaming response with the attachment content.
        """

        if self.request.couch_user.is_web_user() and not can_view_attachments(
                self.request):
            return HttpResponseForbidden()

        if not case_id or not attachment_id:
            raise Http404

        # Optional image-resizing parameters from the query string; 0 means
        # "no limit" for the numeric ones.
        img = self.request.GET.get('img', None)
        size = self.request.GET.get('size', OBJECT_ORIGINAL)
        max_width = int(self.request.GET.get('max_image_width', 0))
        max_height = int(self.request.GET.get('max_image_height', 0))
        max_filesize = int(self.request.GET.get('max_size', 0))

        try:
            # Existence check only -- the case object itself is not used.
            CaseAccessors(domain).get_case(case_id)
        except CaseNotFound:
            raise Http404

        if img is not None:
            if size == "debug_all":
                # Debug mode: render an HTML page showing every configured
                # image size inline, with its metadata, instead of streaming
                # a single attachment.
                url_base = reverse("api_case_attachment",
                                   kwargs={
                                       "domain": self.request.domain,
                                       "case_id": case_id,
                                       "attachment_id": attachment_id,
                                   })

                r = HttpResponse(content_type="text/html")
                r.write('<html><body>')
                r.write('<ul>')
                for fsize in IMAGE_SIZE_ORDERING:
                    meta, stream = fetch_case_image(
                        domain,
                        case_id,
                        attachment_id,
                        filesize_limit=max_filesize,
                        width_limit=max_width,
                        height_limit=max_height,
                        fixed_size=fsize)

                    r.write('<li>')
                    r.write('Size: %s<br>' % fsize)
                    r.write("Limit: max_size: %s" % max_filesize)
                    if max_width > 0:
                        r.write(", max_width: %s" % max_width)
                    if max_height > 0:
                        r.write(", max_height: %s" % max_height)
                    r.write("<br>")
                    if meta is not None:
                        r.write('Resolution: %d x %d<br>' %
                                (meta['width'], meta['height']))
                        r.write('Filesize: %d<br>' % meta['content_length'])

                        url_params = urllib.urlencode({
                            "img": '1',
                            "size": fsize,
                            "max_size": max_filesize,
                            "max_image_width": max_width,
                            "max_image_height": max_height,
                        })
                        r.write('<img src="%(attach_url)s?%(params)s">' % {
                            "attach_url": url_base,
                            "params": url_params
                        })
                    else:
                        r.write('Not available')
                    r.write('</li>')
                r.write('</ul></body></html>')
                return r
            else:
                # Single resized image at the requested (or original) size.
                attachment_meta, attachment_stream = fetch_case_image(
                    domain,
                    case_id,
                    attachment_id,
                    filesize_limit=max_filesize,
                    width_limit=max_width,
                    height_limit=max_height,
                    fixed_size=size)
        else:
            # Non-image request: serve the cached raw attachment.
            cached_attachment = get_cached_case_attachment(
                domain, case_id, attachment_id)
            attachment_meta, attachment_stream = cached_attachment.get()

        if attachment_meta is not None:
            mime_type = attachment_meta['content_type']
        else:
            # Fall back to a generic MIME type when no metadata is available.
            # (Fixed: was the invalid "plain/text".)
            mime_type = "text/plain"

        return StreamingHttpResponse(
            streaming_content=FileWrapper(attachment_stream),
            content_type=mime_type)
class ImporterTest(TestCase):
    def setUp(self):
        super(ImporterTest, self).setUp()
        self.domain_obj = create_domain("importer-test")
        self.domain = self.domain_obj.name
        self.default_case_type = 'importer-test-casetype'

        self.couch_user = WebUser.create(None, "test", "foobar", None, None)
        self.couch_user.add_domain_membership(self.domain, is_admin=True)
        self.couch_user.save()

        self.accessor = CaseAccessors(self.domain)

        self.factory = CaseFactory(domain=self.domain,
                                   case_defaults={
                                       'case_type': self.default_case_type,
                                   })
        delete_all_cases()

    def tearDown(self):
        self.couch_user.delete(deleted_by=None)
        self.domain_obj.delete()
        super(ImporterTest, self).tearDown()

    def _config(self,
                col_names,
                search_column=None,
                case_type=None,
                search_field='case_id',
                create_new_cases=True):
        return ImporterConfig(
            couch_user_id=self.couch_user._id,
            case_type=case_type or self.default_case_type,
            excel_fields=col_names,
            case_fields=[''] * len(col_names),
            custom_fields=col_names,
            search_column=search_column or col_names[0],
            search_field=search_field,
            create_new_cases=create_new_cases,
        )

    @run_with_all_backends
    @patch('corehq.apps.case_importer.tasks.bulk_import_async.update_state')
    def testImportFileMissing(self, update_state):
        # by using a made up upload_id, we ensure it's not referencing any real file
        case_upload = CaseUploadRecord(upload_id=str(uuid.uuid4()),
                                       task_id=str(uuid.uuid4()))
        case_upload.save()
        res = bulk_import_async.delay(self._config(['anything']), self.domain,
                                      case_upload.upload_id)
        self.assertIsInstance(res.result, Ignore)
        update_state.assert_called_with(
            state=states.FAILURE,
            meta=get_interned_exception(
                'Sorry, your session has expired. Please start over and try again.'
            ))
        self.assertEqual(0, len(get_case_ids_in_domain(self.domain)))

    @run_with_all_backends
    def testImportBasic(self):
        config = self._config(['case_id', 'age', 'sex', 'location'])
        file = make_worksheet_wrapper(
            ['case_id', 'age', 'sex', 'location'],
            ['case_id-0', 'age-0', 'sex-0', 'location-0'],
            ['case_id-1', 'age-1', 'sex-1', 'location-1'],
            ['case_id-2', 'age-2', 'sex-2', 'location-2'],
            ['case_id-3', 'age-3', 'sex-3', 'location-3'],
            ['case_id-4', 'age-4', 'sex-4', 'location-4'],
        )
        res = do_import(file, config, self.domain)
        self.assertEqual(5, res['created_count'])
        self.assertEqual(0, res['match_count'])
        self.assertFalse(res['errors'])
        self.assertEqual(1, res['num_chunks'])
        case_ids = self.accessor.get_case_ids_in_domain()
        cases = list(self.accessor.get_cases(case_ids))
        self.assertEqual(5, len(cases))
        properties_seen = set()
        for case in cases:
            self.assertEqual(self.couch_user._id, case.user_id)
            self.assertEqual(self.couch_user._id, case.owner_id)
            self.assertEqual(self.default_case_type, case.type)
            for prop in ['age', 'sex', 'location']:
                self.assertTrue(prop in case.get_case_property(prop))
                self.assertFalse(
                    case.get_case_property(prop) in properties_seen)
                properties_seen.add(case.get_case_property(prop))

    @run_with_all_backends
    def testImportNamedColumns(self):
        config = self._config(['case_id', 'age', 'sex', 'location'])
        file = make_worksheet_wrapper(
            ['case_id', 'age', 'sex', 'location'],
            ['case_id-0', 'age-0', 'sex-0', 'location-0'],
            ['case_id-1', 'age-1', 'sex-1', 'location-1'],
            ['case_id-2', 'age-2', 'sex-2', 'location-2'],
            ['case_id-3', 'age-3', 'sex-3', 'location-3'],
        )
        res = do_import(file, config, self.domain)

        self.assertEqual(4, res['created_count'])
        self.assertEqual(4, len(self.accessor.get_case_ids_in_domain()))

    @run_with_all_backends
    def testImportTrailingWhitespace(self):
        cols = ['case_id', 'age', 'sex\xa0', 'location']
        config = self._config(cols)
        file = make_worksheet_wrapper(
            ['case_id', 'age', 'sex\xa0', 'location'],
            ['case_id-0', 'age-0', 'sex\xa0-0', 'location-0'],
        )
        res = do_import(file, config, self.domain)

        self.assertEqual(1, res['created_count'])
        case_ids = self.accessor.get_case_ids_in_domain()
        self.assertEqual(1, len(case_ids))
        case = self.accessor.get_case(case_ids[0])
        self.assertTrue(bool(case.get_case_property(
            'sex')))  # make sure the value also got properly set

    @run_with_all_backends
    def testCaseIdMatching(self):
        # bootstrap a stub case
        [case] = self.factory.create_or_update_case(
            CaseStructure(attrs={
                'create': True,
                'update': {
                    'importer_test_prop': 'foo'
                },
            }))
        self.assertEqual(1, len(self.accessor.get_case_ids_in_domain()))

        config = self._config(['case_id', 'age', 'sex', 'location'])
        file = make_worksheet_wrapper(
            ['case_id', 'age', 'sex', 'location'],
            [case.case_id, 'age-0', 'sex-0', 'location-0'],
            [case.case_id, 'age-1', 'sex-1', 'location-1'],
            [case.case_id, 'age-2', 'sex-2', 'location-2'],
        )
        res = do_import(file, config, self.domain)
        self.assertEqual(0, res['created_count'])
        self.assertEqual(3, res['match_count'])
        self.assertFalse(res['errors'])

        # shouldn't create any more cases, just the one
        case_ids = self.accessor.get_case_ids_in_domain()
        self.assertEqual(1, len(case_ids))
        [case] = self.accessor.get_cases(case_ids)
        for prop in ['age', 'sex', 'location']:
            self.assertTrue(prop in case.get_case_property(prop))

        # shouldn't touch existing properties
        self.assertEqual('foo', case.get_case_property('importer_test_prop'))

    @run_with_all_backends
    def testCaseLookupTypeCheck(self):
        [case] = self.factory.create_or_update_case(
            CaseStructure(attrs={
                'create': True,
                'case_type': 'nonmatch-type',
            }))
        self.assertEqual(1, len(self.accessor.get_case_ids_in_domain()))
        config = self._config(['case_id', 'age', 'sex', 'location'])
        file = make_worksheet_wrapper(
            ['case_id', 'age', 'sex', 'location'],
            [case.case_id, 'age-0', 'sex-0', 'location-0'],
            [case.case_id, 'age-1', 'sex-1', 'location-1'],
            [case.case_id, 'age-2', 'sex-2', 'location-2'],
        )
        res = do_import(file, config, self.domain)
        # because the type is wrong these shouldn't match
        self.assertEqual(3, res['created_count'])
        self.assertEqual(0, res['match_count'])
        self.assertEqual(4, len(self.accessor.get_case_ids_in_domain()))

    @run_with_all_backends
    def testCaseLookupDomainCheck(self):
        """Rows whose case_id points at a case in another domain must not
        match; the importer creates new cases in its own domain."""
        self.factory.domain = 'wrong-domain'
        [case] = self.factory.create_or_update_case(
            CaseStructure(attrs={
                'create': True,
            }))
        # the stub case lives in 'wrong-domain', so our domain is empty
        self.assertEqual(0, len(self.accessor.get_case_ids_in_domain()))
        config = self._config(['case_id', 'age', 'sex', 'location'])
        # renamed from `file`, which shadowed the builtin
        worksheet = make_worksheet_wrapper(
            ['case_id', 'age', 'sex', 'location'],
            [case.case_id, 'age-0', 'sex-0', 'location-0'],
            [case.case_id, 'age-1', 'sex-1', 'location-1'],
            [case.case_id, 'age-2', 'sex-2', 'location-2'],
        )
        res = do_import(worksheet, config, self.domain)

        # because the domain is wrong these shouldn't match
        self.assertEqual(3, res['created_count'])
        self.assertEqual(0, res['match_count'])
        self.assertEqual(3, len(self.accessor.get_case_ids_in_domain()))

    @run_with_all_backends
    def testExternalIdMatching(self):
        """Rows can match an existing case via external_id instead of
        case_id; repeated rows all update the same case."""
        # bootstrap a stub case
        external_id = 'importer-test-external-id'
        [case] = self.factory.create_or_update_case(
            CaseStructure(attrs={
                'create': True,
                'external_id': external_id,
            }))
        self.assertEqual(1, len(self.accessor.get_case_ids_in_domain()))

        headers = ['external_id', 'age', 'sex', 'location']
        config = self._config(headers, search_field='external_id')
        # renamed from `file`, which shadowed the builtin
        worksheet = make_worksheet_wrapper(
            ['external_id', 'age', 'sex', 'location'],
            ['importer-test-external-id', 'age-0', 'sex-0', 'location-0'],
            ['importer-test-external-id', 'age-1', 'sex-1', 'location-1'],
            ['importer-test-external-id', 'age-2', 'sex-2', 'location-2'],
        )
        res = do_import(worksheet, config, self.domain)
        self.assertEqual(0, res['created_count'])
        self.assertEqual(3, res['match_count'])
        self.assertFalse(res['errors'])

        # shouldn't create any more cases, just the one
        self.assertEqual(1, len(self.accessor.get_case_ids_in_domain()))

    @run_with_all_backends
    def test_external_id_matching_on_create_with_custom_column_name(self):
        """When the search column has a custom name mapped to external_id,
        the first row creates the case and later rows match it."""
        headers = ['id_column', 'age', 'sex', 'location']
        external_id = 'external-id-test'
        config = self._config(headers[1:],
                              search_column='id_column',
                              search_field='external_id')
        # renamed from `file`, which shadowed the builtin
        worksheet = make_worksheet_wrapper(
            ['id_column', 'age', 'sex', 'location'],
            ['external-id-test', 'age-0', 'sex-0', 'location-0'],
            ['external-id-test', 'age-1', 'sex-1', 'location-1'],
        )

        res = do_import(worksheet, config, self.domain)
        self.assertFalse(res['errors'])
        self.assertEqual(1, res['created_count'])
        self.assertEqual(1, res['match_count'])
        case_ids = self.accessor.get_case_ids_in_domain()
        self.assertEqual(1, len(case_ids))
        case = self.accessor.get_case(case_ids[0])
        # the custom column's value must land in the case's external_id
        self.assertEqual(external_id, case.external_id)

    def testNoCreateNew(self):
        """With create_new_cases=False and no matching cases, the import
        is a no-op."""
        config = self._config(['case_id', 'age', 'sex', 'location'],
                              create_new_cases=False)
        # renamed from `file`, which shadowed the builtin
        worksheet = make_worksheet_wrapper(
            ['case_id', 'age', 'sex', 'location'],
            ['case_id-0', 'age-0', 'sex-0', 'location-0'],
            ['case_id-1', 'age-1', 'sex-1', 'location-1'],
            ['case_id-2', 'age-2', 'sex-2', 'location-2'],
            ['case_id-3', 'age-3', 'sex-3', 'location-3'],
            ['case_id-4', 'age-4', 'sex-4', 'location-4'],
        )
        res = do_import(worksheet, config, self.domain)

        # no matching and no create new set - should do nothing
        self.assertEqual(0, res['created_count'])
        self.assertEqual(0, res['match_count'])
        self.assertEqual(0, len(get_case_ids_in_domain(self.domain)))

    def testBlankRows(self):
        """Rows that are entirely blank (None or empty strings) must not
        create cases even when create_new_cases=True."""
        config = self._config(['case_id', 'age', 'sex', 'location'],
                              create_new_cases=True)
        # renamed from `file`, which shadowed the builtin
        worksheet = make_worksheet_wrapper(
            ['case_id', 'age', 'sex', 'location'],
            [None, None, None, None],
            ['', '', '', ''],
        )
        res = do_import(worksheet, config, self.domain)

        # blank rows are skipped entirely - should do nothing
        self.assertEqual(0, res['created_count'])
        self.assertEqual(0, res['match_count'])
        self.assertEqual(0, len(get_case_ids_in_domain(self.domain)))

    @patch('corehq.apps.case_importer.do_import.CASEBLOCK_CHUNKSIZE', 2)
    def testBasicChunking(self):
        """With the chunk size patched to 2, five rows are submitted in
        three chunks."""
        config = self._config(['case_id', 'age', 'sex', 'location'])
        # renamed from `file`, which shadowed the builtin
        worksheet = make_worksheet_wrapper(
            ['case_id', 'age', 'sex', 'location'],
            ['case_id-0', 'age-0', 'sex-0', 'location-0'],
            ['case_id-1', 'age-1', 'sex-1', 'location-1'],
            ['case_id-2', 'age-2', 'sex-2', 'location-2'],
            ['case_id-3', 'age-3', 'sex-3', 'location-3'],
            ['case_id-4', 'age-4', 'sex-4', 'location-4'],
        )
        res = do_import(worksheet, config, self.domain)
        # 5 cases in chunks of 2 = 3 chunks
        self.assertEqual(3, res['num_chunks'])
        self.assertEqual(5, res['created_count'])
        self.assertEqual(5, len(get_case_ids_in_domain(self.domain)))

    @run_with_all_backends
    def testExternalIdChunking(self):
        """Matching on external_id needs a lookup pass, which adds an extra
        chunk; repeated ids create once then update the same case."""
        # bootstrap a stub case
        external_id = 'importer-test-external-id'

        headers = ['external_id', 'age', 'sex', 'location']
        config = self._config(headers, search_field='external_id')
        # renamed from `file`, which shadowed the builtin
        worksheet = make_worksheet_wrapper(
            ['external_id', 'age', 'sex', 'location'],
            ['importer-test-external-id', 'age-0', 'sex-0', 'location-0'],
            ['importer-test-external-id', 'age-1', 'sex-1', 'location-1'],
            ['importer-test-external-id', 'age-2', 'sex-2', 'location-2'],
        )

        # the first one should create the case, and the remaining two should update it
        res = do_import(worksheet, config, self.domain)
        self.assertEqual(1, res['created_count'])
        self.assertEqual(2, res['match_count'])
        self.assertFalse(res['errors'])
        self.assertEqual(2,
                         res['num_chunks'])  # the lookup causes an extra chunk

        # should just create the one case
        case_ids = self.accessor.get_case_ids_in_domain()
        self.assertEqual(1, len(case_ids))
        [case] = self.accessor.get_cases(case_ids)
        self.assertEqual(external_id, case.external_id)
        for prop in ['age', 'sex', 'location']:
            self.assertTrue(prop in case.get_case_property(prop))

    @run_with_all_backends
    def testParentCase(self):
        """Rows may reference a parent case by id; rows pointing at a
        missing parent are reported under InvalidParentId."""
        headers = ['parent_id', 'name', 'case_id']
        config = self._config(headers,
                              create_new_cases=True,
                              search_column='case_id')
        rows = 3
        [parent_case] = self.factory.create_or_update_case(
            CaseStructure(attrs={'create': True}))
        self.assertEqual(1, len(self.accessor.get_case_ids_in_domain()))

        # renamed from `file`/`file_missing`; `file` shadowed the builtin
        worksheet = make_worksheet_wrapper(
            ['parent_id', 'name', 'case_id'],
            [parent_case.case_id, 'name-0', 'case_id-0'],
            [parent_case.case_id, 'name-1', 'case_id-1'],
            [parent_case.case_id, 'name-2', 'case_id-2'],
        )
        worksheet_missing = make_worksheet_wrapper(
            ['parent_id', 'name', 'case_id'],
            ['parent_id-0', 'name-0', 'case_id-0'],
            ['parent_id-1', 'name-1', 'case_id-1'],
            ['parent_id-2', 'name-2', 'case_id-2'],
        )

        # Should successfully match on `rows` cases
        res = do_import(worksheet, config, self.domain)
        self.assertEqual(rows, res['created_count'])

        # Should be unable to find parent case on `rows` cases
        res = do_import(worksheet_missing, config, self.domain)
        error_column_name = 'parent_id'
        self.assertEqual(
            rows,
            len(res['errors'][exceptions.InvalidParentId.title]
                [error_column_name]['rows']),
            "All cases should have missing parent")

    def import_mock_file(self, rows):
        """Run an import over *rows* (first row is the header) and return
        the do_import result dict."""
        header_row = rows[0]
        import_config = self._config(header_row)
        sheet = make_worksheet_wrapper(*rows)
        return do_import(sheet, import_config, self.domain)

    @run_with_all_backends
    def testLocationOwner(self):
        """Owners may be set by location id, site code, or name; duplicate
        names and non-case-sharing locations are reported as errors."""
        # This is actually testing several different things, but I figure it's
        # worth it, as each of these tests takes a non-trivial amount of time.
        plain_type = LocationType.objects.create(domain=self.domain,
                                                 name='lt1',
                                                 shares_cases=False)
        sharing_type = LocationType.objects.create(domain=self.domain,
                                                   name='lt2',
                                                   shares_cases=True)
        owner_loc = make_loc('loc-1', 'Loc 1', self.domain, sharing_type.code)
        make_loc('loc-2', 'Loc 2', self.domain, sharing_type.code)
        dup_loc = make_loc('loc-3', 'Loc 2', self.domain, sharing_type.code)
        bad_loc = make_loc('loc-4', 'Loc 4', self.domain, plain_type.code)

        res = self.import_mock_file([
            ['case_id', 'name', 'owner_id', 'owner_name'],
            ['', 'location-owner-id', owner_loc.group_id, ''],
            ['', 'location-owner-code', '', owner_loc.site_code],
            ['', 'location-owner-name', '', owner_loc.name],
            ['', 'duplicate-location-name', '', dup_loc.name],
            ['', 'non-case-owning-name', '', bad_loc.name],
        ])
        case_ids = self.accessor.get_case_ids_in_domain()
        cases_by_name = {c.name: c for c in self.accessor.get_cases(case_ids)}

        # id, code and name lookups should all resolve to the same owner
        for case_name in ('location-owner-id', 'location-owner-code',
                          'location-owner-name'):
            self.assertEqual(cases_by_name[case_name].owner_id,
                             owner_loc.group_id)

        dup_error = exceptions.DuplicateLocationName.title
        self.assertIn(dup_error, res['errors'])
        self.assertEqual(res['errors'][dup_error][None]['rows'], [5])

        owner_error = exceptions.InvalidOwner.title
        self.assertIn(owner_error, res['errors'])
        self.assertEqual(res['errors'][owner_error]['owner_name']['rows'], [6])

    @run_with_all_backends
    def test_opened_on(self):
        """With BULK_UPLOAD_DATE_OPENED enabled, a date_opened column
        overwrites the case's opened_on timestamp."""
        imported_case = self.factory.create_case()
        date_opened = '2015-04-30T14:41:53.000000Z'
        with flag_enabled('BULK_UPLOAD_DATE_OPENED'):
            self.import_mock_file([
                ['case_id', 'date_opened'],
                [imported_case.case_id, date_opened],
            ])
        reloaded = CaseAccessors(self.domain).get_case(imported_case.case_id)
        self.assertEqual(reloaded.opened_on,
                         PhoneTime(parse_datetime(date_opened)).done())
Beispiel #38
0
 def payload_doc(self, repeat_record):
     """Fetch the case that *repeat_record* points at via its payload_id."""
     accessor = CaseAccessors(repeat_record.domain)
     return accessor.get_case(repeat_record.payload_id)
Beispiel #39
0
    return (previous_owner_id
            and previous_owner_id != UNOWNED_EXTENSION_OWNER_ID
            and previous_owner_id != next_owner_id)


def get_all_extensions_to_close(domain, case_updates):
    """Union the extension case ids to close across all *case_updates*."""
    extensions = set()
    for update_meta in case_updates:
        extensions |= get_extensions_to_close(update_meta.case, domain)
    return extensions


def get_extensions_to_close(case, domain):
    """Return the open extension chain of a closed case.

    Empty set unless the case is closed and the domain has extension-case
    sync enabled.
    """
    if not case.closed or not EXTENSION_CASES_SYNC_ENABLED.enabled(domain):
        return set()
    return CaseAccessors(domain).get_extension_chain(
        [case.case_id], include_closed=False)


def is_device_report(doc):
    """exclude device reports"""
    device_report_xmlns = "http://code.javarosa.org/devicereport"

    def _from_form_dict(doc):
        return isinstance(
            doc,
            dict) and "@xmlns" in doc and doc["@xmlns"] == device_report_xmlns

    def _from_xform_instance(doc):
Beispiel #40
0
 def obj_get(self, bundle, **kwargs):
     """Resolve a single CommCareCase for the API, 404-ing when missing."""
     domain = kwargs['domain']
     case_id = kwargs['pk']
     try:
         return CaseAccessors(domain).get_case(case_id)
     except CaseNotFound:
         # surface as an API-level "does not exist" error
         raise object_does_not_exist("CommCareCase", case_id)
Beispiel #41
0
 def _get_updated_episode(self):
     """Run the case updater, then re-fetch the episode case from the db."""
     self.case_updater.run()
     accessors = CaseAccessors(self.domain)
     return accessors.get_case(self.episode_id)
Beispiel #42
0
 def couch_doc(self):
     """Lazily fetch and memoize the underlying case document."""
     doc = self._couch_doc
     if doc is None:
         doc = CaseAccessors(self.domain).get_case(self._id)
         self._couch_doc = doc
     return doc
Beispiel #43
0
class CaseAPIHelper(object):
    """
    Simple config object for querying the APIs
    """
    def __init__(self,
                 domain,
                 status=CASE_STATUS_OPEN,
                 case_type=None,
                 ids_only=False,
                 footprint=False,
                 strip_history=False,
                 filters=None):
        # `filters` maps a slash-separated case_json path to an expected
        # value; see _case_results._filter for the matching semantics.
        if status not in [
                CASE_STATUS_ALL, CASE_STATUS_CLOSED, CASE_STATUS_OPEN
        ]:
            raise ValueError("invalid case status %s" % status)
        self.domain = domain
        self.status = status
        self.case_type = case_type
        self.ids_only = ids_only
        self.wrap = not ids_only  # if we're just querying IDs we don't need to wrap the docs
        self.footprint = footprint
        self.strip_history = strip_history
        self.filters = filters
        self.case_accessors = CaseAccessors(self.domain)

    def _case_results(self, case_id_list):
        # Turn a list of case ids into CaseAPIResult objects, applying
        # filters and (optionally) expanding to the dependency footprint.
        def _filter(res):
            # True if the result matches every configured filter.  A filter
            # value of 'null'/'true'/'false' is also compared against the
            # JSON-decoded value so boolean/None properties match.
            if self.filters:
                for path, val in self.filters.items():
                    actual_val = safe_index(res.case_json, path.split("/"))
                    if actual_val != val:
                        # closed=false => case.closed == False
                        if val in ('null', 'true', 'false'):
                            if actual_val != json.loads(val):
                                return False
                        else:
                            return False
                return True

        if self.filters and not self.footprint:
            base_results = self._populate_results(case_id_list)
            return filter(_filter, base_results)

        if self.footprint:
            # include all cases the initial set depends on (parents etc.)
            initial_case_ids = set(case_id_list)
            dependent_case_ids = get_dependent_case_info(
                self.domain, initial_case_ids).all_ids
            all_case_ids = initial_case_ids | dependent_case_ids
        else:
            all_case_ids = case_id_list

        if self.ids_only:
            # skip fetching the documents entirely
            return [
                CaseAPIResult(domain=self.domain, id=case_id, id_only=True)
                for case_id in all_case_ids
            ]
        else:
            return self._populate_results(all_case_ids)

    def _populate_results(self, case_id_list):
        # Fetch the full case docs, choosing the SQL or couch code path
        # based on the domain's backend.
        if should_use_sql_backend(self.domain):
            base_results = [
                CaseAPIResult(domain=self.domain,
                              couch_doc=case,
                              id_only=self.ids_only)
                for case in self.case_accessors.iter_cases(case_id_list)
            ]
        else:
            base_results = [
                CaseAPIResult(domain=self.domain,
                              couch_doc=case,
                              id_only=self.ids_only) for case in iter_cases(
                                  case_id_list, self.strip_history, self.wrap)
            ]
        return base_results

    def get_all(self):
        # All cases in the domain for the configured type/status.
        # NOTE: only ALL and OPEN are supported here; CLOSED raises.
        status = self.status or CASE_STATUS_ALL
        if status == CASE_STATUS_ALL:
            case_ids = self.case_accessors.get_case_ids_in_domain(
                self.case_type)
        elif status == CASE_STATUS_OPEN:
            case_ids = self.case_accessors.get_open_case_ids_in_domain_by_type(
                self.case_type)
        else:
            raise ValueError("Invalid value for 'status': '%s'" % status)

        return self._case_results(case_ids)

    def get_owned(self, user_id):
        # Cases owned by the user (or any of the user's owner ids, e.g.
        # case-sharing groups); falls back to user_id itself when the user
        # can't be loaded.
        try:
            user = CouchUser.get_by_user_id(user_id, self.domain)
        except KeyError:
            user = None
        try:
            owner_ids = user.get_owner_ids()
        except AttributeError:
            # user is None or has no get_owner_ids
            owner_ids = [user_id]

        closed = {
            CASE_STATUS_OPEN: False,
            CASE_STATUS_CLOSED: True,
            CASE_STATUS_ALL: None,
        }[self.status]

        ids = self.case_accessors.get_case_ids_by_owners(owner_ids,
                                                         closed=closed)
        return self._case_results(ids)
    case_location = get_case_location(case)
    if not case_location:
        return []
    location_repeaters = defaultdict(list)
    for repeater in OpenmrsRepeater.by_domain(case.domain):
        if repeater.location_id:
            location_repeaters[repeater.location_id].append(repeater)
    for location_id in reversed(case_location.path):
        if location_id in location_repeaters:
            return location_repeaters[location_id]
    return []


def get_openmrs_location_uuid(domain, case_id):
    """Return the OpenMRS UUID stored on the case's location, or None."""
    case = CaseAccessors(domain).get_case(case_id)
    case_location = get_case_location(case)
    if not case_location:
        return None
    return case_location.metadata.get(LOCATION_OPENMRS_UUID)


class CreatePersonAttributeTask(WorkflowTask):

    def __init__(self, requests, person_uuid, attribute_type_uuid, value):
        # Capture everything needed to create a person attribute in OpenMRS.
        # `requests` is the HTTP client used to talk to the OpenMRS API.
        self.requests = requests
        self.person_uuid = person_uuid
        self.attribute_type_uuid = attribute_type_uuid
        self.value = value
        # Filled in later — presumably by run() once the POST succeeds;
        # None until then.  (run() is defined elsewhere — confirm.)
        self.attribute_uuid = None

    def run(self):
        response = self.requests.post(
Beispiel #45
0
 def case(self):
     """The case for self.case_id, or None if it doesn't exist."""
     accessors = CaseAccessors(self.domain)
     try:
         return accessors.get_case(self.case_id)
     except CaseNotFound:
         return None
Beispiel #46
0
 def setUp(self):
     super(TestFormArchiving, self).setUp()
     # case/form accessors pinned to the fixed domain used by these tests
     self.casedb = CaseAccessors('test-domain')
     self.formdb = FormAccessors('test-domain')
Beispiel #47
0
class ImporterTest(TestCase):
    """Tests for the Excel case importer (do_import / bulk_import_async).

    Covers creation vs. matching by case_id and external_id, chunking,
    parent-case lookups, location-based owners, and per-column type errors.
    """

    def setUp(self):
        super(ImporterTest, self).setUp()
        self.domain = create_domain("importer-test").name
        self.default_case_type = 'importer-test-casetype'
        self.default_headers = ['case_id', 'age', 'sex', 'location']

        self.couch_user = WebUser.create(None, "test", "foobar")
        self.couch_user.add_domain_membership(self.domain, is_admin=True)
        self.couch_user.save()

        self.accessor = CaseAccessors(self.domain)

        self.factory = CaseFactory(domain=self.domain,
                                   case_defaults={
                                       'case_type': self.default_case_type,
                                   })
        delete_all_cases()

    def tearDown(self):
        self.couch_user.delete()
        delete_all_locations()
        LocationType.objects.all().delete()
        super(ImporterTest, self).tearDown()

    def _config(self,
                col_names=None,
                search_column=None,
                case_type=None,
                search_field='case_id',
                named_columns=False,
                create_new_cases=True,
                type_fields=None):
        """Build an ImporterConfig with test defaults.

        Every column defaults to a 'plain' custom field and the first
        column is used as the search column unless overridden.
        """
        col_names = col_names or self.default_headers
        case_type = case_type or self.default_case_type
        search_column = search_column or col_names[0]
        type_fields = type_fields if type_fields is not None else [
            'plain'
        ] * len(col_names)
        return ImporterConfig(
            couch_user_id=self.couch_user._id,
            case_type=case_type,
            excel_fields=col_names,
            case_fields=[''] * len(col_names),
            custom_fields=col_names,
            type_fields=type_fields,
            search_column=search_column,
            search_field=search_field,
            named_columns=named_columns,
            create_new_cases=create_new_cases,
            key_column='',
            value_column='',
        )

    @run_with_all_backends
    def testImportNone(self):
        # a None file means the upload session expired
        res = bulk_import_async(self._config(), self.domain, None)
        self.assertEqual(
            'Sorry, your session has expired. Please start over and try again.',
            unicode(res['errors']))
        self.assertEqual(0, len(get_case_ids_in_domain(self.domain)))

    @run_with_all_backends
    def testImporterErrors(self):
        # spreadsheet retrieval failures surface as a user-facing error
        with mock.patch(
                'corehq.apps.importer.tasks.importer_util.get_spreadsheet',
                side_effect=ImporterError()):
            res = bulk_import_async(self._config(), self.domain, None)
            self.assertEqual(
                'The session containing the file you uploaded has expired - please upload a new one.',
                unicode(res['errors']))
            self.assertEqual(0, len(get_case_ids_in_domain(self.domain)))

    @run_with_all_backends
    def testImportBasic(self):
        # five fresh rows -> five new cases with the expected owner/type/props
        config = self._config(self.default_headers)
        file = MockExcelFile(header_columns=self.default_headers, num_rows=5)
        res = do_import(file, config, self.domain)
        self.assertEqual(5, res['created_count'])
        self.assertEqual(0, res['match_count'])
        self.assertFalse(res['errors'])
        self.assertEqual(1, res['num_chunks'])
        case_ids = self.accessor.get_case_ids_in_domain()
        cases = list(self.accessor.get_cases(case_ids))
        self.assertEqual(5, len(cases))
        properties_seen = set()
        for case in cases:
            self.assertEqual(self.couch_user._id, case.user_id)
            self.assertEqual(self.couch_user._id, case.owner_id)
            self.assertEqual(self.default_case_type, case.type)
            for prop in self.default_headers[1:]:
                self.assertTrue(prop in case.get_case_property(prop))
                # generated values should be unique across rows
                self.assertFalse(
                    case.get_case_property(prop) in properties_seen)
                properties_seen.add(case.get_case_property(prop))

    @run_with_all_backends
    def testImportNamedColumns(self):
        config = self._config(self.default_headers, named_columns=True)
        file = MockExcelFile(header_columns=self.default_headers, num_rows=5)
        res = do_import(file, config, self.domain)
        # we create 1 less since we knock off the header column
        self.assertEqual(4, res['created_count'])
        self.assertEqual(4, len(self.accessor.get_case_ids_in_domain()))

    @run_with_all_backends
    def testImportTrailingWhitespace(self):
        # header contains a trailing non-breaking space (u'\xa0')
        cols = ['case_id', 'age', u'sex\xa0', 'location']
        config = self._config(cols, named_columns=True)
        file = MockExcelFile(header_columns=cols, num_rows=2)
        res = do_import(file, config, self.domain)
        # we create 1 less since we knock off the header column
        self.assertEqual(1, res['created_count'])
        case_ids = self.accessor.get_case_ids_in_domain()
        self.assertEqual(1, len(case_ids))
        case = self.accessor.get_case(case_ids[0])
        self.assertTrue(bool(case.get_case_property(
            'sex')))  # make sure the value also got properly set

    @run_with_all_backends
    def testCaseIdMatching(self):
        # bootstrap a stub case
        [case] = self.factory.create_or_update_case(
            CaseStructure(attrs={
                'create': True,
                'update': {
                    'importer_test_prop': 'foo'
                },
            }))
        self.assertEqual(1, len(self.accessor.get_case_ids_in_domain()))

        config = self._config(self.default_headers)
        file = MockExcelFile(header_columns=self.default_headers,
                             num_rows=3,
                             row_generator=id_match_generator(case.case_id))
        res = do_import(file, config, self.domain)
        self.assertEqual(0, res['created_count'])
        self.assertEqual(3, res['match_count'])
        self.assertFalse(res['errors'])

        # shouldn't create any more cases, just the one
        case_ids = self.accessor.get_case_ids_in_domain()
        self.assertEqual(1, len(case_ids))
        [case] = self.accessor.get_cases(case_ids)
        for prop in self.default_headers[1:]:
            self.assertTrue(prop in case.get_case_property(prop))

        # shouldn't touch existing properties
        self.assertEqual('foo', case.get_case_property('importer_test_prop'))

    @run_with_all_backends
    def testCaseLookupTypeCheck(self):
        # a case of the wrong type must not match; new cases get created
        [case] = self.factory.create_or_update_case(
            CaseStructure(attrs={
                'create': True,
                'case_type': 'nonmatch-type',
            }))
        self.assertEqual(1, len(self.accessor.get_case_ids_in_domain()))
        config = self._config(self.default_headers)
        file = MockExcelFile(header_columns=self.default_headers,
                             num_rows=3,
                             row_generator=id_match_generator(case.case_id))
        res = do_import(file, config, self.domain)
        # because the type is wrong these shouldn't match
        self.assertEqual(3, res['created_count'])
        self.assertEqual(0, res['match_count'])
        self.assertEqual(4, len(self.accessor.get_case_ids_in_domain()))

    @run_with_all_backends
    def testCaseLookupDomainCheck(self):
        # a case in another domain must not match
        self.factory.domain = 'wrong-domain'
        [case] = self.factory.create_or_update_case(
            CaseStructure(attrs={
                'create': True,
            }))
        self.assertEqual(0, len(self.accessor.get_case_ids_in_domain()))
        config = self._config(self.default_headers)
        file = MockExcelFile(header_columns=self.default_headers,
                             num_rows=3,
                             row_generator=id_match_generator(case.case_id))
        res = do_import(file, config, self.domain)

        # because the domain is wrong these shouldn't match
        self.assertEqual(3, res['created_count'])
        self.assertEqual(0, res['match_count'])
        self.assertEqual(3, len(self.accessor.get_case_ids_in_domain()))

    @run_with_all_backends
    def testExternalIdMatching(self):
        # bootstrap a stub case
        external_id = 'importer-test-external-id'
        [case] = self.factory.create_or_update_case(
            CaseStructure(attrs={
                'create': True,
                'external_id': external_id,
            }))
        self.assertEqual(1, len(self.accessor.get_case_ids_in_domain()))

        headers = ['external_id', 'age', 'sex', 'location']
        config = self._config(headers, search_field='external_id')
        file = MockExcelFile(header_columns=headers,
                             num_rows=3,
                             row_generator=id_match_generator(external_id))
        res = do_import(file, config, self.domain)
        self.assertEqual(0, res['created_count'])
        self.assertEqual(3, res['match_count'])
        self.assertFalse(res['errors'])

        # shouldn't create any more cases, just the one
        self.assertEqual(1, len(self.accessor.get_case_ids_in_domain()))

    @run_with_all_backends
    def test_external_id_matching_on_create_with_custom_column_name(self):
        headers = ['id_column', 'age', 'sex', 'location']
        external_id = 'external-id-test'
        config = self._config(headers[1:],
                              search_column='id_column',
                              search_field='external_id')
        file = MockExcelFile(header_columns=headers,
                             num_rows=2,
                             row_generator=id_match_generator(external_id))
        res = do_import(file, config, self.domain)
        # first row creates, second row matches the newly created case
        self.assertEqual(1, res['created_count'])
        self.assertEqual(1, res['match_count'])
        self.assertFalse(res['errors'])
        case_ids = self.accessor.get_case_ids_in_domain()
        self.assertEqual(1, len(case_ids))
        case = self.accessor.get_case(case_ids[0])
        self.assertEqual(external_id, case.external_id)

    def testNoCreateNew(self):
        config = self._config(self.default_headers, create_new_cases=False)
        file = MockExcelFile(header_columns=self.default_headers, num_rows=5)
        res = do_import(file, config, self.domain)

        # no matching and no create new set - should do nothing
        self.assertEqual(0, res['created_count'])
        self.assertEqual(0, res['match_count'])
        self.assertEqual(0, len(get_case_ids_in_domain(self.domain)))

    def testBlankRows(self):
        # don't create new cases for rows left blank
        config = self._config(self.default_headers, create_new_cases=True)
        file = MockExcelFile(header_columns=self.default_headers,
                             num_rows=5,
                             row_generator=blank_row_generator)
        res = do_import(file, config, self.domain)

        # no matching and no create new set - should do nothing
        self.assertEqual(0, res['created_count'])
        self.assertEqual(0, res['match_count'])
        self.assertEqual(0, len(get_case_ids_in_domain(self.domain)))

    def testBasicChunking(self):
        config = self._config(self.default_headers)
        file = MockExcelFile(header_columns=self.default_headers, num_rows=5)
        res = do_import(file, config, self.domain, chunksize=2)
        # 5 cases in chunks of 2 = 3 chunks
        self.assertEqual(3, res['num_chunks'])
        self.assertEqual(5, res['created_count'])
        self.assertEqual(5, len(get_case_ids_in_domain(self.domain)))

    @run_with_all_backends
    def testExternalIdChunking(self):
        # bootstrap a stub case
        external_id = 'importer-test-external-id'

        headers = ['external_id', 'age', 'sex', 'location']
        config = self._config(headers, search_field='external_id')
        file = MockExcelFile(header_columns=headers,
                             num_rows=3,
                             row_generator=id_match_generator(external_id))

        # the first one should create the case, and the remaining two should update it
        res = do_import(file, config, self.domain)
        self.assertEqual(1, res['created_count'])
        self.assertEqual(2, res['match_count'])
        self.assertFalse(res['errors'])
        self.assertEqual(2,
                         res['num_chunks'])  # the lookup causes an extra chunk

        # should just create the one case
        case_ids = self.accessor.get_case_ids_in_domain()
        self.assertEqual(1, len(case_ids))
        [case] = self.accessor.get_cases(case_ids)
        self.assertEqual(external_id, case.external_id)
        for prop in self.default_headers[1:]:
            self.assertTrue(prop in case.get_case_property(prop))

    @run_with_all_backends
    def testParentCase(self):
        headers = ['parent_id', 'name', 'case_id']
        config = self._config(headers,
                              create_new_cases=True,
                              search_column='case_id')
        rows = 3
        [parent_case] = self.factory.create_or_update_case(
            CaseStructure(attrs={'create': True}))
        self.assertEqual(1, len(self.accessor.get_case_ids_in_domain()))

        file = MockExcelFile(header_columns=headers,
                             num_rows=rows,
                             row_generator=id_match_generator(
                                 parent_case.case_id))
        file_missing = MockExcelFile(header_columns=headers, num_rows=rows)

        # Should successfully match on `rows` cases
        res = do_import(file, config, self.domain)
        self.assertEqual(rows, res['created_count'])

        # Should be unable to find parent case on `rows` cases
        res = do_import(file_missing, config, self.domain)
        error_column_name = 'parent_id'
        self.assertEqual(
            rows,
            len(res['errors'][ImportErrors.InvalidParentId][error_column_name]
                ['rows']), "All cases should have missing parent")

    def import_mock_file(self, rows):
        """Run an import over *rows* (first row = headers); return result."""
        config = self._config(rows[0])
        case_rows = rows[1:]
        num_rows = len(case_rows)
        xls_file = MockExcelFile(
            header_columns=rows[0],
            num_rows=num_rows,
            row_generator=lambda _, i: case_rows[i],
        )
        return do_import(xls_file, config, self.domain)

    @run_with_all_backends
    def testLocationOwner(self):
        # This is actually testing several different things, but I figure it's
        # worth it, as each of these tests takes a non-trivial amount of time.
        non_case_sharing = LocationType.objects.create(domain=self.domain,
                                                       name='lt1',
                                                       shares_cases=False)
        case_sharing = LocationType.objects.create(domain=self.domain,
                                                   name='lt2',
                                                   shares_cases=True)
        location = make_loc('loc-1', 'Loc 1', self.domain, case_sharing)
        make_loc('loc-2', 'Loc 2', self.domain, case_sharing)
        duplicate_loc = make_loc('loc-3', 'Loc 2', self.domain, case_sharing)
        improper_loc = make_loc('loc-4', 'Loc 4', self.domain,
                                non_case_sharing)

        res = self.import_mock_file([
            ['case_id', 'name', 'owner_id', 'owner_name'],
            ['', 'location-owner-id', location.group_id, ''],
            ['', 'location-owner-code', '', location.site_code],
            ['', 'location-owner-name', '', location.name],
            ['', 'duplicate-location-name', '', duplicate_loc.name],
            ['', 'non-case-owning-name', '', improper_loc.name],
        ])
        case_ids = self.accessor.get_case_ids_in_domain()
        cases = {c.name: c for c in list(self.accessor.get_cases(case_ids))}

        # id, code and name lookups should all resolve to the same owner
        self.assertEqual(cases['location-owner-id'].owner_id,
                         location.group_id)
        self.assertEqual(cases['location-owner-code'].owner_id,
                         location.group_id)
        self.assertEqual(cases['location-owner-name'].owner_id,
                         location.group_id)

        error_message = ImportErrors.DuplicateLocationName
        error_column_name = None
        self.assertIn(error_message, res['errors'])
        self.assertEqual(
            res['errors'][error_message][error_column_name]['rows'], [4])

        error_message = ImportErrors.InvalidOwnerId
        self.assertIn(error_message, res['errors'])
        error_column_name = 'owner_id'
        self.assertEqual(
            res['errors'][error_message][error_column_name]['rows'], [5])

    def _typeTest(self, type_fields, error_message):
        """Import 3 rows with the given per-column types and assert that
        every row is reported under *error_message* for column 2."""
        config = self._config(self.default_headers, type_fields=type_fields)
        file = MockExcelFile(header_columns=self.default_headers, num_rows=3)
        res = do_import(file, config, self.domain)
        self.assertIn(self.default_headers[1], res['errors'][error_message])
        self.assertEqual(
            res['errors'][error_message][self.default_headers[1]]['rows'],
            [1, 2, 3])

    def testDateError(self):
        self._typeTest(['plain', 'date', 'plain', 'plain'],
                       ImportErrors.InvalidDate)

    def testIntegerError(self):
        self._typeTest(['plain', 'integer', 'plain', 'plain'],
                       ImportErrors.InvalidInteger)
Beispiel #48
0
class TestFormArchiving(TestCase, TestFileMixin):
    """Tests form archive/unarchive behavior: case side effects, recorded
    operation history, and the archive/unarchive signals."""
    file_path = ('data', 'sample_xforms')
    root = os.path.dirname(__file__)

    def setUp(self):
        super(TestFormArchiving, self).setUp()
        self.casedb = CaseAccessors('test-domain')
        self.formdb = FormAccessors('test-domain')

    def tearDown(self):
        FormProcessorTestUtils.delete_all_xforms()
        FormProcessorTestUtils.delete_all_cases()
        super(TestFormArchiving, self).tearDown()

    def testArchive(self):
        """Archiving soft-deletes the form's case and records the operation;
        unarchiving restores the case and records that too."""
        case_id = 'ddb8e2b3-7ce0-43e4-ad45-d7a2eebe9169'
        xml_data = self.get_xml('basic')
        result = submit_form_locally(
            xml_data,
            'test-domain',
        )
        xform = result.xform
        self.assertTrue(xform.is_normal)
        self.assertEqual(0, len(xform.history))

        # Bracket the archive call so we can assert on the recorded timestamp.
        lower_bound = datetime.utcnow() - timedelta(seconds=1)
        xform.archive(user_id='mr. librarian')
        upper_bound = datetime.utcnow() + timedelta(seconds=1)

        xform = self.formdb.get_form(xform.form_id)
        self.assertTrue(xform.is_archived)
        # The case created by the form is soft-deleted and no longer
        # references the form.
        case = self.casedb.get_case(case_id)
        self.assertTrue(case.is_deleted)
        self.assertEqual(case.xform_ids, [])

        [archival] = xform.history
        self.assertTrue(lower_bound <= archival.date <= upper_bound)
        self.assertEqual('archive', archival.operation)
        self.assertEqual('mr. librarian', archival.user)

        lower_bound = datetime.utcnow() - timedelta(seconds=1)
        xform.unarchive(user_id='mr. researcher')
        upper_bound = datetime.utcnow() + timedelta(seconds=1)

        xform = self.formdb.get_form(xform.form_id)
        self.assertTrue(xform.is_normal)
        # Unarchiving brings the case back and re-links the form.
        case = self.casedb.get_case(case_id)
        self.assertFalse(case.is_deleted)
        self.assertEqual(case.xform_ids, [xform.form_id])

        [archival, restoration] = xform.history
        self.assertTrue(lower_bound <= restoration.date <= upper_bound)
        self.assertEqual('unarchive', restoration.operation)
        self.assertEqual('mr. researcher', restoration.user)

    def testSignal(self):
        """xform_archived / xform_unarchived each fire exactly once per
        archive / unarchive call."""
        global archive_counter, restore_counter
        archive_counter = 0
        restore_counter = 0

        def count_archive(**kwargs):
            global archive_counter
            archive_counter += 1

        def count_unarchive(**kwargs):
            global restore_counter
            restore_counter += 1

        xform_archived.connect(count_archive)
        xform_unarchived.connect(count_unarchive)
        # BUG FIX: the receivers were never disconnected, so they leaked into
        # every subsequent test that archives a form. Always detach them,
        # even if an assertion below fails.
        self.addCleanup(xform_archived.disconnect, count_archive)
        self.addCleanup(xform_unarchived.disconnect, count_unarchive)

        xml_data = self.get_xml('basic')
        result = submit_form_locally(
            xml_data,
            'test-domain',
        )

        self.assertEqual(0, archive_counter)
        self.assertEqual(0, restore_counter)

        result.xform.archive()
        self.assertEqual(1, archive_counter)
        self.assertEqual(0, restore_counter)

        xform = self.formdb.get_form(result.xform.form_id)
        xform.unarchive()
        self.assertEqual(1, archive_counter)
        self.assertEqual(1, restore_counter)
 def _get_case_ids(self):
     """Return all case ids in this test's domain.

     NOTE(review): relies on ``self.domain_name`` set by the enclosing test
     class, which is not visible in this fragment — confirm against it.
     """
     return CaseAccessors(domain=self.domain_name).get_case_ids_in_domain()
# Beispiel #50
# 0
 def setUp(self):
     """Create the form-processor interface plus case/form accessors for
     ``self.domain`` (a class attribute of the enclosing EditFormTest)."""
     super(EditFormTest, self).setUp()
     self.interface = FormProcessorInterface(self.domain)
     self.casedb = CaseAccessors(self.domain)
     self.formdb = FormAccessors(self.domain)
class ENikshay2BMigrator(object):
    """Migrates eNikshay person case sets (person plus related occurrence,
    episode, test, referral, trail, and drtb-hiv cases) to the 2B structure.

    ``commit`` is stored for use by the migration methods — presumably it
    gates whether changes are persisted; confirm against the caller.
    """

    def __init__(self, domain, commit):
        self.domain = domain
        self.commit = commit
        self.accessor = CaseAccessors(self.domain)
        self.factory = CaseFactory(self.domain)

        # Running totals of migrated cases, by type.
        self.total_persons = 0
        self.total_occurrences = 0
        self.total_episodes = 0
        self.total_tests = 0
        self.total_referrals = 0
        self.total_trails = 0
        self.total_secondary_owners = 0
        self.total_drtb_hiv = 0

    @property
    @memoized
    def locations(self):
        """All locations in the domain, keyed by location_id."""
        return {loc.location_id: loc for loc in
                SQLLocation.objects.filter(domain=self.domain).prefetch_related('location_type')}

    @property
    @memoized
    def location_ids_by_pk(self):
        """Map of SQL primary key -> location_id, for parent traversal."""
        return {loc.pk: loc.location_id for loc in self.locations.values()}

    def get_ancestors_by_type(self, location):
        """Get all direct ancestors found in self.locations"""
        ancestors_by_type = {location.location_type.code: location}
        loc = location
        while loc.parent_id and loc.parent_id in self.location_ids_by_pk:
            parent = self.locations[self.location_ids_by_pk[loc.parent_id]]
            ancestors_by_type[parent.location_type.code] = parent
            loc = parent
        return ancestors_by_type

    def migrate(self):
        """Run the migration over every relevant person case set."""
        person_ids = self.get_relevant_person_case_ids()
        persons = self.get_relevant_person_case_sets(person_ids)
        for person in with_progress_bar(persons, len(person_ids)):
            self.migrate_person_case_set(person)

    def get_relevant_person_case_ids(self):
        return self.accessor.get_case_ids_in_domain(CASE_TYPE_PERSON)

    def get_relevant_person_case_sets(self, person_ids):
        """
        Generator returning all relevant cases for the migration, grouped by person.

        This is a pretty nasty method, but it was the only way I could figure
        out how to group the queries together, rather than performing multiple
        queries per person case.
        """
        for person_chunk in chunked(person_ids, 100):
            person_chunk = list(filter(None, person_chunk))
            all_persons = {}  # case_id: PersonCaseSet
            for person in self.accessor.get_cases(person_chunk):
                # enrolled_in_private is blank/not set AND case_version is blank/not set
                # AND owner_id is within the location set being migrated
                if (person.get_case_property(ENROLLED_IN_PRIVATE) != 'true'
                        and not person.get_case_property(CASE_VERSION)):
                    all_persons[person.case_id] = PersonCaseSet(person)

            referrals_and_occurrences_to_person = {}
            type_to_bucket = {CASE_TYPE_OCCURRENCE: 'occurrences',
                              CASE_TYPE_REFERRAL: 'referrals',
                              CASE_TYPE_TRAIL: 'trails'}
            # First hop: cases indexing directly into a person.
            for case in self.accessor.get_reverse_indexed_cases(list(all_persons)):
                bucket = type_to_bucket.get(case.type, None)
                if bucket:
                    for index in case.indices:
                        if index.referenced_id in all_persons:
                            getattr(all_persons[index.referenced_id], bucket).append(case)
                            if bucket != 'trails':
                                referrals_and_occurrences_to_person[case.case_id] = index.referenced_id
                            break

            type_to_bucket = {CASE_TYPE_EPISODE: 'episodes',
                              CASE_TYPE_TEST: 'tests',
                              CASE_TYPE_TRAIL: 'trails'}
            episodes_to_person = {}
            # Second hop: cases indexing into a referral or occurrence.
            for case in self.accessor.get_reverse_indexed_cases(list(referrals_and_occurrences_to_person)):
                bucket = type_to_bucket.get(case.type, None)
                if bucket:
                    for index in case.indices:
                        person_id = referrals_and_occurrences_to_person.get(index.referenced_id)
                        if person_id:
                            getattr(all_persons[person_id], bucket).append(case)
                            if case.type == CASE_TYPE_EPISODE:
                                episodes_to_person[case.case_id] = person_id
                            break

            # Third hop: drtb-hiv referrals indexing into an episode.
            for case in self.accessor.get_reverse_indexed_cases(list(episodes_to_person)):
                if case.type == CASE_TYPE_DRTB_HIV_REFERRAL:
                    for index in case.indices:
                        person_id = episodes_to_person.get(index.referenced_id)
                        if person_id:
                            all_persons[person_id].drtb_hiv.append(case)
                            break

            for person in all_persons.values():
                if person.occurrences:
                    # BUG FIX: max() over (opened_on, case) tuples falls back
                    # to comparing the case objects themselves when two
                    # occurrences share an opened_on, which raises TypeError.
                    # Compare on the date only.
                    person.latest_occurrence = max(person.occurrences,
                                                   key=lambda case: case.opened_on)
                yield person
# Beispiel #52
# 0
class EditFormTest(TestCase, TestFileMixin):
    """Tests re-submission of a form with an existing form id ("editing"):
    deprecation of the old form, case rebuilds, attachments, and operation
    history."""
    ID = '7H46J37FGH3'
    domain = 'test-form-edits'

    file_path = ('data', 'deprecation')
    root = os.path.dirname(__file__)

    def setUp(self):
        super(EditFormTest, self).setUp()
        self.interface = FormProcessorInterface(self.domain)
        self.casedb = CaseAccessors(self.domain)
        self.formdb = FormAccessors(self.domain)

    def tearDown(self):
        FormProcessorTestUtils.delete_all_xforms(self.domain)
        FormProcessorTestUtils.delete_all_cases(self.domain)
        UnfinishedSubmissionStub.objects.all().delete()
        super(EditFormTest, self).tearDown()

    def test_basic_edit(self):
        """Editing replaces the form's content, deprecates the original, and
        records an 'edit' operation."""
        original_xml = self.get_xml('original')
        edit_xml = self.get_xml('edit')

        xform = submit_form_locally(original_xml, self.domain).xform

        self.assertEqual(self.ID, xform.form_id)
        self.assertTrue(xform.is_normal)
        self.assertEqual("", xform.form_data['vitals']['height'])
        self.assertEqual("other", xform.form_data['assessment']['categories'])

        xform = submit_form_locally(edit_xml, self.domain).xform
        self.assertEqual(self.ID, xform.form_id)
        self.assertTrue(xform.is_normal)
        self.assertEqual("100", xform.form_data['vitals']['height'])
        self.assertEqual("Edited Baby!",
                         xform.form_data['assessment']['categories'])

        self.assertEqual(1, len(xform.history))
        self.assertEqual('edit', xform.history[0].operation)

        # The original submission lives on as a deprecated form with a new id.
        deprecated_xform = self.formdb.get_form(xform.deprecated_form_id)

        self.assertEqual(self.ID, deprecated_xform.orig_id)
        self.assertNotEqual(self.ID, deprecated_xform.form_id)
        self.assertTrue(deprecated_xform.is_deprecated)
        self.assertEqual("", deprecated_xform.form_data['vitals']['height'])
        self.assertEqual(
            "other", deprecated_xform.form_data['assessment']['categories'])

        self.assertEqual(xform.received_on, deprecated_xform.received_on)
        self.assertEqual(xform.deprecated_form_id, deprecated_xform.form_id)
        self.assertTrue(xform.edited_on > deprecated_xform.received_on)

        self.assertEqual(deprecated_xform.get_xml(), original_xml)
        self.assertEqual(xform.get_xml(), edit_xml)

    def test_edit_form_with_attachments(self):
        """Attachments from the original submission survive an edit."""
        attachment_source = './corehq/ex-submodules/casexml/apps/case/tests/data/attachments/fruity.jpg'
        attachment_file = open(attachment_source, 'rb')
        # BUG FIX: the file handle was never closed (leaked for the
        # remainder of the test run).
        self.addCleanup(attachment_file.close)
        attachments = {
            'fruity_file':
            UploadedFile(attachment_file,
                         'fruity_file',
                         content_type='image/jpeg')
        }

        def _get_xml(date, form_id):
            return """<?xml version='1.0' ?>
               <data uiVersion="1" version="1" name="" xmlns="http://openrosa.org/formdesigner/123">
                   <name>fgg</name>
                   <date>2011-06-07</date>
                   <n1:meta xmlns:n1="http://openrosa.org/jr/xforms">
                       <n1:deviceID>354957031935664</n1:deviceID>
                       <n1:timeStart>{date}</n1:timeStart>
                       <n1:timeEnd>{date}</n1:timeEnd>
                       <n1:username>bcdemo</n1:username>
                       <n1:userID>user-abc</n1:userID>
                       <n1:instanceID>{form_id}</n1:instanceID>
                   </n1:meta>
               </data>""".format(date=date, form_id=form_id)

        form_id = uuid.uuid4().hex
        original_xml = _get_xml('2016-03-01T12:04:16Z', form_id)
        submit_form_locally(
            original_xml,
            self.domain,
            attachments=attachments,
        )
        form = self.formdb.get_form(form_id)
        self.assertIn('fruity_file', form.attachments)
        self.assertIn(original_xml, form.get_xml())

        # edit form
        edit_xml = _get_xml('2016-04-01T12:04:16Z', form_id)
        submit_form_locally(
            edit_xml,
            self.domain,
        )
        form = self.formdb.get_form(form_id)
        self.assertIsNotNone(form.edited_on)
        self.assertIsNotNone(form.deprecated_form_id)
        self.assertIn('fruity_file', form.attachments)
        self.assertIn(edit_xml, form.get_xml())

    def test_edit_an_error(self):
        """Re-submitting a corrected form over an errored one clears the error."""
        form_id = uuid.uuid4().hex
        case_block = CaseBlock(
            create=True,
            case_id='',  # this should cause the submission to error
            case_type='person',
            owner_id='some-owner',
        )

        form, _ = submit_case_blocks(case_block.as_string(),
                                     domain=self.domain,
                                     form_id=form_id)
        self.assertTrue(form.is_error)
        self.assertTrue('IllegalCaseId' in form.problem)

        case_block.case_id = uuid.uuid4().hex
        form, _ = submit_case_blocks(case_block.as_string(),
                                     domain=self.domain,
                                     form_id=form_id)
        self.assertFalse(form.is_error)
        self.assertEqual(None, getattr(form, 'problem', None))

    def test_broken_save(self):
        """
        Test that if the second form submission terminates unexpectedly
        and the main form isn't saved, then there are no side effects
        such as the original having been marked as deprecated.
        """

        original_xml = self.get_xml('original')
        edit_xml = self.get_xml('edit')

        result = submit_form_locally(original_xml, self.domain)
        xform = result.xform
        self.assertEqual(self.ID, xform.form_id)
        self.assertTrue(xform.is_normal)
        self.assertEqual(self.domain, xform.domain)

        self.assertEqual(
            UnfinishedSubmissionStub.objects.filter(xform_id=self.ID).count(),
            0)

        # Simulate the save step blowing up mid-edit.
        with patch.object(self.interface.processor,
                          'save_processed_models',
                          side_effect=HTTPError):
            with self.assertRaises(HTTPError):
                submit_form_locally(edit_xml, self.domain)

        # it didn't go through, so make sure there are no edits still
        self.assertIsNone(getattr(xform, 'deprecated_form_id', None))

        xform = self.formdb.get_form(self.ID)
        self.assertIsNotNone(xform)
        self.assertEqual(
            UnfinishedSubmissionStub.objects.filter(xform_id=self.ID).count(),
            0)

    def test_case_management(self):
        """An edited form's case updates fully replace the original's."""
        form_id = uuid.uuid4().hex
        case_id = uuid.uuid4().hex
        owner_id = uuid.uuid4().hex
        case_block = CaseBlock(create=True,
                               case_id=case_id,
                               case_type='person',
                               owner_id=owner_id,
                               update={
                                   'property': 'original value'
                               }).as_string()
        submit_case_blocks(case_block, domain=self.domain, form_id=form_id)

        # validate some assumptions
        case = self.casedb.get_case(case_id)
        self.assertEqual(case.type, 'person')
        self.assertEqual(case.dynamic_case_properties()['property'],
                         'original value')
        self.assertEqual([form_id], case.xform_ids)

        if not getattr(settings, 'TESTS_SHOULD_USE_SQL_BACKEND', False):
            self.assertEqual(2, len(case.actions))
            for a in case.actions:
                self.assertEqual(form_id, a.xform_id)

        # submit a new form with a different case update
        case_block = CaseBlock(create=True,
                               case_id=case_id,
                               case_type='newtype',
                               owner_id=owner_id,
                               update={
                                   'property': 'edited value'
                               }).as_string()
        xform, _ = submit_case_blocks(case_block,
                                      domain=self.domain,
                                      form_id=form_id)

        case = self.casedb.get_case(case_id)
        self.assertEqual(case.type, 'newtype')
        self.assertEqual(case.dynamic_case_properties()['property'],
                         'edited value')
        self.assertEqual([form_id], case.xform_ids)
        self.assertEqual(case.server_modified_on, xform.edited_on)

        if not getattr(settings, 'TESTS_SHOULD_USE_SQL_BACKEND', False):
            self.assertEqual(2, len(case.actions))
            for a in case.actions:
                self.assertEqual(form_id, a.xform_id)

    def test_second_edit_fails(self):
        """A failed edit leaves the form errored and the original deprecated."""
        form_id = uuid.uuid4().hex
        case_id = uuid.uuid4().hex
        case_block = CaseBlock(
            create=True,
            case_id=case_id,
            case_type='person',
        ).as_string()
        submit_case_blocks(case_block, domain=self.domain, form_id=form_id)

        # submit an edit form with a bad case update (for example a bad ID)
        case_block = CaseBlock(
            create=True,
            case_id='',
            case_type='person',
        ).as_string()
        submit_case_blocks(case_block, domain=self.domain, form_id=form_id)

        xform = self.formdb.get_form(form_id)
        self.assertTrue(xform.is_error)

        deprecated_xform = self.formdb.get_form(xform.deprecated_form_id)
        self.assertTrue(deprecated_xform.is_deprecated)

    def test_case_management_ordering(self):
        """Editing a middle form keeps case updates applied in original order."""
        case_id = uuid.uuid4().hex
        owner_id = uuid.uuid4().hex

        # create a case
        case_block = CaseBlock(
            create=True,
            case_id=case_id,
            case_type='person',
            owner_id=owner_id,
        ).as_string()
        create_form_id = submit_case_blocks(case_block,
                                            domain=self.domain)[0].form_id

        # validate that worked
        case = self.casedb.get_case(case_id)
        self.assertEqual([create_form_id], case.xform_ids)

        if not getattr(settings, 'TESTS_SHOULD_USE_SQL_BACKEND', False):
            self.assertTrue(
                create_form_id in [a.xform_id for a in case.actions])
            for a in case.actions:
                self.assertEqual(create_form_id, a.xform_id)

        edit_date = datetime.utcnow()
        # set some property value
        case_block = CaseBlock(create=False,
                               case_id=case_id,
                               date_modified=edit_date,
                               update={
                                   'property': 'first value',
                               }).as_string()
        edit_form_id = submit_case_blocks(case_block,
                                          domain=self.domain)[0].form_id

        # validate that worked
        case = self.casedb.get_case(case_id)
        self.assertEqual(case.dynamic_case_properties()['property'],
                         'first value')
        self.assertEqual([create_form_id, edit_form_id], case.xform_ids)

        if not getattr(settings, 'TESTS_SHOULD_USE_SQL_BACKEND', False):
            self.assertTrue(
                all(form_id in [a.xform_id for a in case.actions]
                    for form_id in [create_form_id, edit_form_id]))

        # submit a second (new) form updating the value
        case_block = CaseBlock(create=False,
                               case_id=case_id,
                               update={
                                   'property': 'final value',
                               }).as_string()
        second_edit_form_id = submit_case_blocks(case_block,
                                                 domain=self.domain)[0].form_id

        # validate that worked
        case = self.casedb.get_case(case_id)
        self.assertEqual(case.dynamic_case_properties()['property'],
                         'final value')
        self.assertEqual([create_form_id, edit_form_id, second_edit_form_id],
                         case.xform_ids)

        if not getattr(settings, 'TESTS_SHOULD_USE_SQL_BACKEND', False):
            self.assertTrue(
                all(form_id in [a.xform_id for a in case.actions] for form_id
                    in [create_form_id, edit_form_id, second_edit_form_id]))

        # deprecate the middle edit
        case_block = CaseBlock(
            create=False,
            case_id=case_id,
            # need to use the previous edit date for action sort comparisons
            date_modified=edit_date,
            update={
                'property': 'edited value',
                'added_property': 'added value',
            }).as_string()
        submit_case_blocks(case_block,
                           domain=self.domain,
                           form_id=edit_form_id)

        # ensure that the middle edit stays in the right place and is applied
        # before the final one
        case = self.casedb.get_case(case_id)
        self.assertEqual(case.dynamic_case_properties()['property'],
                         'final value')
        self.assertEqual(case.dynamic_case_properties()['added_property'],
                         'added value')
        self.assertEqual([create_form_id, edit_form_id, second_edit_form_id],
                         case.xform_ids)

        if not getattr(settings, 'TESTS_SHOULD_USE_SQL_BACKEND', False):
            self.assertEqual([
                create_form_id, create_form_id, edit_form_id,
                second_edit_form_id
            ], [a.xform_id for a in case.actions])

    @softer_assert()
    def test_edit_different_xmlns(self):
        """A resubmission with the same id but a different XMLNS is treated as
        a new form, not an edit."""
        form_id = uuid.uuid4().hex
        case1_id = uuid.uuid4().hex
        case2_id = uuid.uuid4().hex
        xmlns1 = 'http://commcarehq.org/xmlns1'
        xmlns2 = 'http://commcarehq.org/xmlns2'

        case_block = CaseBlock(
            create=True,
            case_id=case1_id,
            case_type='person',
            owner_id='owner1',
        ).as_string()
        xform, cases = submit_case_blocks(case_block,
                                          domain=self.domain,
                                          xmlns=xmlns1,
                                          form_id=form_id)

        self.assertTrue(xform.is_normal)
        self.assertEqual(form_id, xform.form_id)

        case_block = CaseBlock(
            create=True,
            case_id=case2_id,
            case_type='goat',
            owner_id='owner1',
        ).as_string()
        # submit new form with same form ID but different XMLNS
        xform, cases = submit_case_blocks(case_block,
                                          domain=self.domain,
                                          xmlns=xmlns2,
                                          form_id=form_id)

        self.assertTrue(xform.is_normal)
        self.assertNotEqual(form_id,
                            xform.form_id)  # form should have a different ID

    def test_copy_operations(self):
        """Archive/unarchive history is carried over onto the edited form."""
        original_xml = self.get_xml('original')
        edit_xml = self.get_xml('edit')

        xform = submit_form_locally(original_xml, self.domain).xform
        xform.archive(user_id='user1')
        xform.unarchive(user_id='user2')

        xform = submit_form_locally(edit_xml, self.domain).xform
        self.assertEqual(3, len(xform.history))
        self.assertEqual('archive', xform.history[0].operation)
        self.assertEqual('unarchive', xform.history[1].operation)
        self.assertEqual('edit', xform.history[2].operation)
# Beispiel #53
# 0
 def process_change(self, change):
     try:
         case = CaseAccessors(change.metadata.domain).get_case(change.id)
class TestFormArchiving(TestCase, TestFileMixin):
    file_path = ('data', 'sample_xforms')
    root = os.path.dirname(__file__)

    def setUp(self):
        """Create case/form accessors for the fixed 'test-domain'."""
        super(TestFormArchiving, self).setUp()
        domain = 'test-domain'
        self.casedb = CaseAccessors(domain)
        self.formdb = FormAccessors(domain)

    def tearDown(self):
        """Wipe every form and case created during the test."""
        utils = FormProcessorTestUtils
        utils.delete_all_xforms()
        utils.delete_all_cases()
        super(TestFormArchiving, self).tearDown()

    def testArchive(self):
        """Archiving soft-deletes the form's case and records the operation;
        unarchiving restores the case and records that too."""
        case_id = 'ddb8e2b3-7ce0-43e4-ad45-d7a2eebe9169'
        xml_data = self.get_xml('basic')
        result = submit_form_locally(
            xml_data,
            'test-domain',
        )
        xform = result.xform
        self.assertTrue(xform.is_normal)
        self.assertEqual(0, len(xform.history))

        # Bracket the archive call so we can assert on the recorded timestamp.
        lower_bound = datetime.utcnow() - timedelta(seconds=1)
        xform.archive(user_id='mr. librarian')
        upper_bound = datetime.utcnow() + timedelta(seconds=1)

        xform = self.formdb.get_form(xform.form_id)
        self.assertTrue(xform.is_archived)
        # The case created by the form is soft-deleted and unlinked.
        case = self.casedb.get_case(case_id)
        self.assertTrue(case.is_deleted)
        self.assertEqual(case.xform_ids, [])

        [archival] = xform.history
        self.assertTrue(lower_bound <= archival.date <= upper_bound)
        self.assertEqual('archive', archival.operation)
        self.assertEqual('mr. librarian', archival.user)

        lower_bound = datetime.utcnow() - timedelta(seconds=1)
        xform.unarchive(user_id='mr. researcher')
        upper_bound = datetime.utcnow() + timedelta(seconds=1)

        xform = self.formdb.get_form(xform.form_id)
        self.assertTrue(xform.is_normal)
        # Unarchiving brings the case back and re-links the form.
        case = self.casedb.get_case(case_id)
        self.assertFalse(case.is_deleted)
        self.assertEqual(case.xform_ids, [xform.form_id])

        [archival, restoration] = xform.history
        self.assertTrue(lower_bound <= restoration.date <= upper_bound)
        self.assertEqual('unarchive', restoration.operation)
        self.assertEqual('mr. researcher', restoration.user)

    def testUnfinishedArchiveStub(self):
        """An archive interrupted mid-signal leaves an UnfinishedArchiveStub;
        reprocess_archive_stubs finishes the case deletion and clears it."""
        # Test running the celery task reprocess_archive_stubs on an existing archive stub
        case_id = 'ddb8e2b3-7ce0-43e4-ad45-d7a2eebe9169'
        xml_data = self.get_xml('basic')
        result = submit_form_locally(
            xml_data,
            'test-domain',
        )
        xform = result.xform
        self.assertTrue(xform.is_normal)
        self.assertEqual(0, len(xform.history))

        # Mock the archive function throwing an error
        with mock.patch('couchforms.signals.xform_archived.send') as mock_send:
            try:
                mock_send.side_effect = Exception
                xform.archive(user_id='librarian')
            except Exception:
                pass

        # Get the form with the updated history, it should be archived
        xform = self.formdb.get_form(xform.form_id)
        self.assertEqual(1, len(xform.history))
        self.assertTrue(xform.is_archived)
        [archival] = xform.history
        self.assertEqual('archive', archival.operation)
        self.assertEqual('librarian', archival.user)

        # The case associated with the form should still exist, it was not rebuilt because of the exception
        case = self.casedb.get_case(case_id)
        self.assertFalse(case.is_deleted)

        # There should be a stub for the unfinished archive
        unfinished_archive_stubs = UnfinishedArchiveStub.objects.filter()
        self.assertEqual(len(unfinished_archive_stubs), 1)
        self.assertEqual(unfinished_archive_stubs[0].history_updated, True)
        self.assertEqual(unfinished_archive_stubs[0].user_id, 'librarian')
        self.assertEqual(unfinished_archive_stubs[0].domain, 'test-domain')
        self.assertEqual(unfinished_archive_stubs[0].archive, True)

        # Manually call the periodic celery task that reruns archiving/unarchiving actions
        reprocess_archive_stubs()

        # The case and stub should both be deleted now
        case = self.casedb.get_case(case_id)
        self.assertTrue(case.is_deleted)
        unfinished_archive_stubs_after_reprocessing = UnfinishedArchiveStub.objects.filter()
        self.assertEqual(len(unfinished_archive_stubs_after_reprocessing), 0)

    def testUnfinishedUnarchiveStub(self):
        """An unarchive interrupted mid-signal leaves a stub (archive=False);
        reprocess_archive_stubs restores the case and clears the stub."""
        # Test running the celery task reprocess_archive_stubs on an existing unarchive stub
        case_id = 'ddb8e2b3-7ce0-43e4-ad45-d7a2eebe9169'
        xml_data = self.get_xml('basic')
        result = submit_form_locally(
            xml_data,
            'test-domain',
        )
        xform = result.xform
        self.assertTrue(xform.is_normal)
        self.assertEqual(0, len(xform.history))

        # Archive the form successfully
        xform.archive(user_id='librarian')

        # Mock the unarchive function throwing an error
        with mock.patch('couchforms.signals.xform_unarchived.send') as mock_send:
            try:
                mock_send.side_effect = Exception
                xform.unarchive(user_id='librarian')
            except Exception:
                pass

        # Make sure the history only has an archive and an unarchive
        xform = self.formdb.get_form(xform.form_id)
        self.assertEqual(2, len(xform.history))
        self.assertFalse(xform.is_archived)
        self.assertEqual('archive', xform.history[0].operation)
        self.assertEqual('librarian', xform.history[0].user)
        self.assertEqual('unarchive', xform.history[1].operation)
        self.assertEqual('librarian', xform.history[1].user)

        # The case should not exist because the unarchived form was not rebuilt
        case = self.casedb.get_case(case_id)
        self.assertTrue(case.is_deleted)

        # There should be a stub for the unfinished unarchive
        unfinished_archive_stubs = UnfinishedArchiveStub.objects.filter()
        self.assertEqual(len(unfinished_archive_stubs), 1)
        self.assertEqual(unfinished_archive_stubs[0].history_updated, True)
        self.assertEqual(unfinished_archive_stubs[0].user_id, 'librarian')
        self.assertEqual(unfinished_archive_stubs[0].domain, 'test-domain')
        self.assertEqual(unfinished_archive_stubs[0].archive, False)

        # Manually call the periodic celery task that reruns archiving/unarchiving actions
        reprocess_archive_stubs()

        # The case should be back, and the stub should be deleted now
        case = self.casedb.get_case(case_id)
        self.assertFalse(case.is_deleted)
        unfinished_archive_stubs_after_reprocessing = UnfinishedArchiveStub.objects.filter()
        self.assertEqual(len(unfinished_archive_stubs_after_reprocessing), 0)

    def testUnarchivingWithArchiveStub(self):
        """A user-initiated unarchive clears a pending archive stub, so later
        reprocessing does not re-delete the case."""
        # Test a user-initiated unarchive with an existing archive stub
        case_id = 'ddb8e2b3-7ce0-43e4-ad45-d7a2eebe9169'
        xml_data = self.get_xml('basic')
        result = submit_form_locally(
            xml_data,
            'test-domain',
        )
        xform = result.xform
        self.assertTrue(xform.is_normal)
        self.assertEqual(0, len(xform.history))
        # Mock the archive function throwing an error
        with mock.patch('couchforms.signals.xform_archived.send') as mock_send:
            try:
                mock_send.side_effect = Exception
                xform.archive(user_id='librarian')
            except Exception:
                pass

        # There should be a stub for the unfinished archive
        unfinished_archive_stubs = UnfinishedArchiveStub.objects.filter()
        self.assertEqual(len(unfinished_archive_stubs), 1)
        self.assertEqual(unfinished_archive_stubs[0].history_updated, True)
        self.assertEqual(unfinished_archive_stubs[0].user_id, 'librarian')
        self.assertEqual(unfinished_archive_stubs[0].domain, 'test-domain')
        self.assertEqual(unfinished_archive_stubs[0].archive, True)

        # Call an unarchive
        xform.unarchive(user_id='librarian')

        # The unfinished archive stub should be deleted
        unfinished_archive_stubs = UnfinishedArchiveStub.objects.filter()
        self.assertEqual(len(unfinished_archive_stubs), 0)

        # The case should exist because the case close was unarchived
        case = self.casedb.get_case(case_id)
        self.assertFalse(case.is_deleted)

        # Manually call the periodic celery task that reruns archiving/unarchiving actions
        reprocess_archive_stubs()

        # Make sure the case still exists (to double check that the archive stub was deleted)
        case = self.casedb.get_case(case_id)
        self.assertFalse(case.is_deleted)

    def testArchivingWithUnarchiveStub(self):
        """A user-initiated archive supersedes and clears an existing
        unfinished-unarchive stub.

        Also verifies (previously only promised in a comment, never asserted)
        that reprocess_archive_stubs() does not append to the form history
        after the stub has been superseded.
        """
        case_id = 'ddb8e2b3-7ce0-43e4-ad45-d7a2eebe9169'
        xml_data = self.get_xml('basic')
        result = submit_form_locally(
            xml_data,
            'test-domain',
        )
        xform = result.xform
        self.assertTrue(xform.is_normal)
        self.assertEqual(0, len(xform.history))

        # Archive the form successfully
        xform.archive(user_id='librarian')

        # Mock the unarchive function throwing an error
        with mock.patch('couchforms.signals.xform_unarchived.send') as mock_send:
            try:
                mock_send.side_effect = Exception
                xform.unarchive(user_id='librarian')
            except Exception:
                pass

        # There should be a stub for the unfinished unarchive
        unfinished_archive_stubs = UnfinishedArchiveStub.objects.filter()
        self.assertEqual(len(unfinished_archive_stubs), 1)
        self.assertEqual(unfinished_archive_stubs[0].history_updated, True)
        self.assertEqual(unfinished_archive_stubs[0].user_id, 'librarian')
        self.assertEqual(unfinished_archive_stubs[0].domain, 'test-domain')
        self.assertEqual(unfinished_archive_stubs[0].archive, False)

        # Call an archive
        xform.archive(user_id='librarian')

        # The unfinished archive stub should be deleted
        unfinished_archive_stubs = UnfinishedArchiveStub.objects.filter()
        self.assertEqual(len(unfinished_archive_stubs), 0)

        # The case should not exist because the case close was archived
        case = self.casedb.get_case(case_id)
        self.assertTrue(case.is_deleted)

        # Snapshot the history length so we can verify the reprocessing task
        # does not append to it. (The original test had only a comment here;
        # the assertion itself was missing.)
        history_len_before = len(self.formdb.get_form(xform.form_id).history)

        # Manually call the periodic celery task that reruns archiving/unarchiving actions
        reprocess_archive_stubs()

        # The history should not have been added to
        xform = self.formdb.get_form(xform.form_id)
        self.assertEqual(history_len_before, len(xform.history))

        # Make sure the case still does not exist (to double check that the unarchive stub was deleted)
        case = self.casedb.get_case(case_id)
        self.assertTrue(case.is_deleted)

    def testUnfinishedArchiveStubErrorAddingHistory(self):
        """reprocess_archive_stubs() completes an archive that died before
        the form history was written."""
        case_id = 'ddb8e2b3-7ce0-43e4-ad45-d7a2eebe9169'
        submission = submit_form_locally(self.get_xml('basic'), 'test-domain')
        form = submission.xform
        self.assertTrue(form.is_normal)
        self.assertEqual(0, len(form.history))

        # Make both backends' history-writing primitives raise so this test
        # covers the couch and sql code paths alike.
        patch_target = 'corehq.form_processor.backends.%s.dbaccessors.%s'
        with mock.patch(patch_target % ('sql', 'FormAccessorSQL.set_archived_state'),
                        side_effect=Exception), \
                mock.patch(patch_target % ('couch', 'XFormOperation'),
                           side_effect=Exception):
            try:
                form.archive(user_id='librarian')
            except Exception:
                pass

        # Reload: the archive never completed, so no history entry was added
        # and the form is not archived.
        form = self.formdb.get_form(form.form_id)
        self.assertEqual(0, len(form.history))
        self.assertFalse(form.is_archived)

        # The exception aborted the archive before the case was rebuilt, so
        # the case associated with the form still exists.
        case = self.casedb.get_case(case_id)
        self.assertFalse(case.is_deleted)

        # A stub recording the unfinished archive exists, flagged as having
        # failed before the history update.
        stubs = UnfinishedArchiveStub.objects.filter()
        self.assertEqual(len(stubs), 1)
        stub = stubs[0]
        self.assertEqual(stub.history_updated, False)
        self.assertEqual(stub.user_id, 'librarian')
        self.assertEqual(stub.domain, 'test-domain')
        self.assertEqual(stub.archive, True)

        # Run the periodic task; it should finish the archive.
        reprocess_archive_stubs()

        # The history now shows exactly one archive operation.
        form = self.formdb.get_form(form.form_id)
        self.assertEqual(1, len(form.history))
        self.assertTrue(form.is_archived)
        [archival] = form.history
        self.assertEqual('archive', archival.operation)
        self.assertEqual('librarian', archival.user)

        # The archive closed the case, and the stub is gone.
        case = self.casedb.get_case(case_id)
        self.assertTrue(case.is_deleted)
        self.assertEqual(len(UnfinishedArchiveStub.objects.filter()), 0)

    def testUnfinishedUnarchiveStubErrorAddingHistory(self):
        """reprocess_archive_stubs() completes an unarchive that died before
        the form history was written."""
        case_id = 'ddb8e2b3-7ce0-43e4-ad45-d7a2eebe9169'
        submission = submit_form_locally(self.get_xml('basic'), 'test-domain')
        form = submission.xform
        self.assertTrue(form.is_normal)
        self.assertEqual(0, len(form.history))

        # Archive cleanly first so there is something to unarchive.
        form.archive(user_id='librarian')

        # Make both backends' history-writing primitives raise so this test
        # covers the couch and sql code paths alike.
        patch_target = 'corehq.form_processor.backends.%s.dbaccessors.%s'
        with mock.patch(patch_target % ('sql', 'FormAccessorSQL.set_archived_state'),
                        side_effect=Exception), \
                mock.patch(patch_target % ('couch', 'XFormOperation'),
                           side_effect=Exception):
            try:
                form.unarchive(user_id='librarian')
            except Exception:
                pass

        # Reload: only the original archive made it into the history.
        form = self.formdb.get_form(form.form_id)
        self.assertEqual(1, len(form.history))
        self.assertTrue(form.is_archived)
        [archival] = form.history
        self.assertEqual('archive', archival.operation)
        self.assertEqual('librarian', archival.user)

        # The case is still gone: the failed unarchive never rebuilt it.
        case = self.casedb.get_case(case_id)
        self.assertTrue(case.is_deleted)

        # A stub recording the unfinished unarchive exists, flagged as having
        # failed before the history update.
        stubs = UnfinishedArchiveStub.objects.filter()
        self.assertEqual(len(stubs), 1)
        stub = stubs[0]
        self.assertEqual(stub.history_updated, False)
        self.assertEqual(stub.user_id, 'librarian')
        self.assertEqual(stub.domain, 'test-domain')
        self.assertEqual(stub.archive, False)

        # Run the periodic task; it should finish the unarchive.
        reprocess_archive_stubs()

        # The history now shows the archive followed by the unarchive.
        form = self.formdb.get_form(form.form_id)
        self.assertEqual(2, len(form.history))
        self.assertFalse(form.is_archived)
        self.assertEqual('archive', form.history[0].operation)
        self.assertEqual('librarian', form.history[0].user)
        self.assertEqual('unarchive', form.history[1].operation)
        self.assertEqual('librarian', form.history[1].user)

        # The case is restored, and the stub is gone.
        case = self.casedb.get_case(case_id)
        self.assertFalse(case.is_deleted)
        self.assertEqual(len(UnfinishedArchiveStub.objects.filter()), 0)

    def testSignal(self):
        """xform_archived / xform_unarchived each fire exactly once per
        archive / unarchive operation.

        The receivers are disconnected via addCleanup; the original left them
        connected to the module-level signals forever, leaking the global
        counters into every later test that archives a form.
        """
        global archive_counter, restore_counter
        archive_counter = 0
        restore_counter = 0

        def count_archive(**kwargs):
            global archive_counter
            archive_counter += 1

        def count_unarchive(**kwargs):
            global restore_counter
            restore_counter += 1

        xform_archived.connect(count_archive)
        self.addCleanup(xform_archived.disconnect, count_archive)
        xform_unarchived.connect(count_unarchive)
        self.addCleanup(xform_unarchived.disconnect, count_unarchive)

        xml_data = self.get_xml('basic')
        result = submit_form_locally(
            xml_data,
            'test-domain',
        )

        # Submission alone must not fire either signal.
        self.assertEqual(0, archive_counter)
        self.assertEqual(0, restore_counter)

        result.xform.archive()
        self.assertEqual(1, archive_counter)
        self.assertEqual(0, restore_counter)

        xform = self.formdb.get_form(result.xform.form_id)
        xform.unarchive()
        self.assertEqual(1, archive_counter)
        self.assertEqual(1, restore_counter)
Beispiel #55
0
class CleanOwnerSyncPayload(object):
    """Builds the case portion of a restore response for "clean" owners.

    Starting from an initial candidate set of case ids, cases are loaded in
    batches, rendered into sync XML elements, and their index relationships
    tracked so dependent/extension cases can be resolved before the response
    is compiled.

    NOTE(review): several methods called below (_process_case_update,
    _mark_case_as_checked, update_index_trees, update_case_ids_on_phone,
    move_no_longer_owned_cases_to_dependent_list_if_necessary,
    purge_and_get_irrelevant_cases, compile_response) are not visible in
    this chunk of the file.
    """

    def __init__(self, timing_context, case_ids_to_sync, restore_state):
        self.restore_state = restore_state
        self.case_accessor = CaseAccessors(self.restore_state.domain)

        # Work queue of case ids still to be fetched and examined;
        # _get_next_case_batch pops from this set.
        self.case_ids_to_sync = case_ids_to_sync
        # Copied so draining the work queue does not lose the original
        # candidate set.
        self.all_maybe_syncing = copy(case_ids_to_sync)
        self.checked_cases = set()
        # case_id -> set of related ids, populated during index processing.
        self.child_indices = defaultdict(set)
        self.extension_indices = defaultdict(set)
        self.all_dependencies_syncing = set()
        self.closed_cases = set()
        # case_id -> PotentialSyncElement holding the rendered sync XML.
        self.potential_elements_to_sync = {}

        self.timing_context = timing_context
        # Counter callback for tracking case-load volume during restores.
        self._track_load = case_load_counter("cleanowner_restore",
                                             restore_state.domain)

    def extend_response(self, response):
        """Run the full pipeline and append the resulting elements to
        ``response``. Each phase is timed via the shared timing context."""
        with self.timing_context('process_case_batches'):
            # Drain the work queue; processing a batch may enqueue more ids.
            while self.case_ids_to_sync:
                self.process_case_batch(self._get_next_case_batch())

        with self.timing_context('update_index_trees'):
            self.update_index_trees()

        with self.timing_context('update_case_ids_on_phone'):
            self.update_case_ids_on_phone()

        with self.timing_context(
                'move_no_longer_owned_cases_to_dependent_list_if_necessary'):
            self.move_no_longer_owned_cases_to_dependent_list_if_necessary()

        with self.timing_context('purge_and_get_irrelevant_cases'):
            irrelevant_cases = self.purge_and_get_irrelevant_cases()

        with self.timing_context('compile_response'):
            self.compile_response(irrelevant_cases, response)

    def _get_next_case_batch(self):
        """Pop up to ``chunk_size`` ids off the work queue and load them,
        dropping deleted cases and cases that do not need to sync."""
        ids = pop_ids(self.case_ids_to_sync, chunk_size)
        self._track_load(len(ids))
        return [
            case for case in self.case_accessor.get_cases(ids)
            if not case.is_deleted and case_needs_to_sync(
                case, last_sync_log=self.restore_state.last_sync_log)
        ]

    def process_case_batch(self, case_batch):
        """Render sync XML for each case update in the batch and record the
        case's indices for later dependency resolution."""
        updates = get_case_sync_updates(self.restore_state.domain, case_batch,
                                        self.restore_state.last_sync_log)

        for update in updates:
            case = update.case
            self.potential_elements_to_sync[
                case.case_id] = PotentialSyncElement(
                    case_stub=CaseStub(case.case_id, case.type),
                    sync_xml_items=get_xml_for_response(
                        update, self.restore_state))
            self._process_case_update(case)
            self._mark_case_as_checked(case)
Beispiel #56
0
 def get(self, request, domain, episode_id, *args, **kwargs):
     """Return the adherence-update JSON for the given episode case.

     If the update raises EnikshayTaskException, its text is returned in a
     plain HttpResponse rather than propagating.
     """
     try:
         episode = CaseAccessors(domain).get_case(episode_id)
         return JsonResponse(EpisodeAdherenceUpdate(domain, episode).update_json())
     except EnikshayTaskException as e:
         # Fixed Python 2-only "except X, e:" syntax, which is a
         # SyntaxError under Python 3.
         return HttpResponse(e)
Beispiel #57
0
class CleanOwnerCaseSyncOperation(object):
    def __init__(self, timing_context, restore_state, async_task=None):
        self.timing_context = timing_context
        self.restore_state = restore_state
        self.case_accessor = CaseAccessors(self.restore_state.domain)
        self.async_task = async_task

    @property
    @memoized
    def cleanliness_flags(self):
        return dict(
            OwnershipCleanlinessFlag.objects.filter(
                domain=self.restore_state.domain,
                owner_id__in=self.restore_state.owner_ids).values_list(
                    'owner_id', 'is_clean'))

    @property
    def payload_class(self):
        if self.async_task is not None:
            return partial(AsyncCleanOwnerPayload,
                           current_task=self.async_task)
        return CleanOwnerSyncPayload

    def is_clean(self, owner_id):
        return self.cleanliness_flags.get(owner_id, False)

    def is_new_owner(self, owner_id):
        return (self.restore_state.is_initial or owner_id
                not in self.restore_state.last_sync_log.owner_ids_on_phone)

    def extend_response(self, response):
        with self.timing_context('get_case_ids_to_sync'):
            case_ids_to_sync = self.get_case_ids_to_sync()
        sync_payload = self.payload_class(self.timing_context,
                                          case_ids_to_sync, self.restore_state)
        return sync_payload.extend_response(response)

    def get_case_ids_to_sync(self):
        case_ids_to_sync = set()
        for owner_id in self.restore_state.owner_ids:
            case_ids_to_sync = case_ids_to_sync | set(
                self.get_case_ids_for_owner(owner_id))

        if (not self.restore_state.is_initial and any([
                not self.is_clean(owner_id)
                for owner_id in self.restore_state.owner_ids
        ])):
            # if it's a steady state sync and we have any dirty owners, then we also need to
            # include ALL cases on the phone that have been modified since the last sync as
            # possible candidates to sync (since they may have been closed or reassigned by someone else)

            # don't bother checking ones we've already decided to check
            other_ids_to_check = self.restore_state.last_sync_log.case_ids_on_phone - case_ids_to_sync
            case_ids_to_sync = case_ids_to_sync | set(
                filter_cases_modified_since(
                    self.case_accessor, list(other_ids_to_check),
                    self.restore_state.last_sync_log.date))
        return case_ids_to_sync

    def get_case_ids_for_owner(self, owner_id):
        if EXTENSION_CASES_SYNC_ENABLED.enabled(self.restore_state.domain):
            return self._get_case_ids_for_owners_with_extensions(owner_id)
        else:
            return self._get_case_ids_for_owners_without_extensions(owner_id)

    def _get_case_ids_for_owners_without_extensions(self, owner_id):
        if self.is_clean(owner_id):
            if self.is_new_owner(owner_id):
                # for a clean owner's initial sync the base set is just the open ids
                return set(
                    self.case_accessor.get_open_case_ids_for_owner(owner_id))
            else:
                # for a clean owner's steady state sync, the base set is anything modified since last sync
                return set(
                    self.case_accessor.get_case_ids_modified_with_owner_since(
                        owner_id, self.restore_state.last_sync_log.date))
        else:
            # TODO: we may want to be smarter than this
            # right now just return the whole footprint and do any filtering later
            # Note: This will also return extensions if they exist.
            return get_case_footprint_info(self.restore_state.domain,
                                           owner_id).all_ids

    def _get_case_ids_for_owners_with_extensions(self, owner_id):
        """Fetches base and extra cases when extensions are enabled"""
        if not self.is_clean(
                owner_id) or self.restore_state.is_first_extension_sync:
            # If this is the first time a user with extensions has synced after
            # the extension flag is toggled, pull all the cases so that the
            # extension parameters get set correctly
            return get_case_footprint_info(self.restore_state.domain,
                                           owner_id).all_ids
        else:
            if self.is_new_owner(owner_id):
                # for a clean owner's initial sync the base set is just the open ids and their extensions
                all_case_ids = set(
                    self.case_accessor.get_open_case_ids_for_owner(owner_id))
                new_case_ids = set(all_case_ids)
                while new_case_ids:
                    all_case_ids = all_case_ids | new_case_ids
                    extension_case_ids = set(
                        self.case_accessor.get_extension_case_ids(
                            new_case_ids))
                    new_case_ids = extension_case_ids - all_case_ids
                return all_case_ids
            else:
                # for a clean owner's steady state sync, the base set is anything modified since last sync
                modified_non_extension_cases = set(
                    self.case_accessor.get_case_ids_modified_with_owner_since(
                        owner_id, self.restore_state.last_sync_log.date))
                # we also need to fetch unowned extension cases that have been modified
                extension_case_ids = list(self.restore_state.last_sync_log.
                                          extension_index_tree.indices.keys())
                modified_extension_cases = set(
                    filter_cases_modified_since(
                        self.case_accessor, extension_case_ids,
                        self.restore_state.last_sync_log.date))
                return modified_non_extension_cases | modified_extension_cases
Beispiel #58
0
 def recipient(self):
     """Resolve the recipient object for this instance when it is a case.

     NOTE(review): this snippet appears truncated — the success path after
     fetching the case, and handling of other recipient types, are not
     visible here.
     """
     if self.recipient_type == self.RECIPIENT_TYPE_CASE:
         try:
             case = CaseAccessors(self.domain).get_case(self.recipient_id)
         except CaseNotFound:
             # Stale recipient_id: a missing case means "no recipient".
             return None
Beispiel #59
0

def sync_user_case(commcare_user, case_type, owner_id, case=None):
    """
    Each time a CommCareUser is saved this method gets called and creates or updates
    a case associated with the user with the user's details.

    This is also called to create user cases when the usercase is used for the
    first time.
    """
    with CriticalSection(
        ['user_case_%s_for_%s' % (case_type, commcare_user._id)]):
        domain = commcare_user.project
        fields = _get_user_case_fields(commcare_user, case_type, owner_id)
        case = case or CaseAccessors(
            domain.name).get_case_by_domain_hq_user_id(commcare_user._id,
                                                       case_type)
        close = commcare_user.to_be_deleted() or not commcare_user.is_active
        user_case_helper = _UserCaseHelper(domain, owner_id)

        def case_should_be_reopened(case, user_case_should_be_closed):
            return case and case.closed and not user_case_should_be_closed

        if not case:
            user_case_helper.create_user_case(commcare_user, fields)
        else:
            if case_should_be_reopened(case, close):
                user_case_helper.re_open_case(case)
            changed_fields = _get_changed_fields(case, fields)
            close_case = close and not case.closed
            if changed_fields or close_case:
    def handle(self, domain, repeater_id, filename, **options):
        """Dump the payloads of a repeater's repeat records to ``filename``
        as CSV.

        Also writes ``duplicates_<filename>`` listing voucher ids seen more
        than once, and ``errors_<filename>`` listing records whose payload
        could not be parsed.
        """
        # (Removed an unused CaseAccessors(domain) local from the original.)
        records = iter_repeat_records_by_domain(domain,
                                                repeater_id=repeater_id)
        record_count = get_repeat_record_count(domain, repeater_id=repeater_id)

        row_names = [
            'VoucherID',
            'EventOccurDate',
            'EventID',
            'BeneficiaryUUID',
            'BeneficiaryType',
            'Location',
            'Amount',
            'DTOLocation',
            'InvestigationType',
            'PersonId',
            'AgencyId',
            'EnikshayApprover',
            'EnikshayRole',
            'EnikshayApprovalDate',
            'Succeeded',  # Some records did succeed when we sent them.
            # Include this so they don't re-pay people.
        ]

        seen_voucher_ids = set()
        duplicate_voucher_ids = set()
        errors = []
        with open(filename, 'w') as f:
            writer = csv.writer(f)
            writer.writerow(row_names)

            for record in with_progress_bar(records, length=record_count):
                try:
                    payload = json.loads(
                        record.get_payload())['voucher_details'][0]
                    voucher_id = record.payload_id
                    payload['Succeeded'] = record.succeeded
                except Exception as e:
                    # Best-effort dump: collect the failure and move on.
                    errors.append([record.payload_id, six.text_type(e)])
                    continue
                if voucher_id in seen_voucher_ids:
                    duplicate_voucher_ids.add(voucher_id)
                else:
                    seen_voucher_ids.add(voucher_id)
                # Missing keys become empty cells rather than "None".
                row = [
                    payload.get(name) if payload.get(name) is not None else ""
                    for name in row_names
                ]
                writer.writerow(row)

        print("{} duplicates found".format(len(duplicate_voucher_ids)))
        if duplicate_voucher_ids:
            with open('duplicates_{}'.format(filename), 'w') as f:
                writer = csv.writer(f)
                for duplicate_id in duplicate_voucher_ids:
                    writer.writerow([duplicate_id])

        print("{} errors".format(len(errors)))
        if errors:
            with open('errors_{}'.format(filename), 'w') as f:
                writer = csv.writer(f)
                writer.writerow(['episode_id', 'error'])
                for error in errors:
                    # BUG FIX: the original wrote the entire ``errors`` list
                    # on every iteration (writer.writerow(errors)) instead of
                    # one [episode_id, error] row per failure.
                    writer.writerow(error)