Example #1
 def test_rebuild_indicators(self, datetime_mock):
     datetime_mock.utcnow.return_value = self.fake_time_now
     sample_doc, expected_indicators = get_sample_doc_and_indicators(self.fake_time_now)
     CommCareCase.get_db().save_doc(sample_doc)
     self.addCleanup(lambda id: CommCareCase.get_db().delete_doc(id), sample_doc['_id'])
     rebuild_indicators(self.config._id)
     self._check_sample_doc_state(expected_indicators)
Example #2
 def test_rebuild_indicators(self, datetime_mock):
     datetime_mock.utcnow.return_value = self.fake_time_now
     self.config.save()
     sample_doc, _ = get_sample_doc_and_indicators(self.fake_time_now)
     CommCareCase.get_db().save_doc(sample_doc)
     rebuild_indicators(self.config._id)
     self._check_sample_doc_state()
Example #3
 def process_form_unarchived(self, form):
     from corehq.apps.commtrack.processing import process_stock
     from casexml.apps.case.models import CommCareCase
     result = process_stock([form])
     result.populate_models()
     result.commit()
     result.finalize()
     CommCareCase.get_db().bulk_save(result.relevant_cases)
Example #4
 def _iter_raw_cases(case_ids):
     if self.strip_history:
         for ids in chunked(case_ids, 100):
             for row in CommCareCase.get_db().view("case/get_lite", keys=ids, include_docs=False):
                 yield row["value"]
     else:
         for raw_case in iter_docs(CommCareCase.get_db(), case_ids):
             yield raw_case
Example #5
def resave_case(domain, case, send_post_save_signal=True):
    from corehq.form_processor.change_publishers import publish_case_saved
    if should_use_sql_backend(domain):
        publish_case_saved(case, send_post_save_signal)
    else:
        if send_post_save_signal:
            case.save()
        else:
            CommCareCase.get_db().save_doc(case._doc)  # don't just call save to avoid signals
Example #6
def get_cases(domain):
    supply_point_ids = (case['id'] for case in CommCareCase.get_db().view(
        'supply_point_by_loc/view',
        startkey=[domain],
        endkey=[domain, {}],
        reduce=False,
        include_docs=False,
    ).all())
    return iter_docs(CommCareCase.get_db(), supply_point_ids)
Example #7
def reprocess_form(sender, xform, *args, **kwargs):
    from corehq.apps.commtrack.processing import process_stock
    result = process_stock([xform])
    for to_save in result.get_models_to_save():
        if to_save:
            to_save.commit()
    result.finalize()
    # todo: use LedgerProcessor
    CommCareCase.get_db().bulk_save(result.relevant_cases)
Example #8
 def setUpClass(cls):
     cls.domain = 'lalksdjflakjsdf'
     cls.cases = [
         CommCareCase(domain=cls.domain, type='type1', name='Alice', user_id='XXX'),
         CommCareCase(domain=cls.domain, type='type2', name='Bob', user_id='XXX'),
         CommCareCase(domain=cls.domain, type='type1', name='Candice', user_id='ZZZ'),
         CommCareCase(domain=cls.domain, type='type1', name='Derek', user_id='XXX', closed=True),
         CommCareCase(domain='maleficent', type='type1', name='Mallory', user_id='YYY')
     ]
     CommCareCase.get_db().bulk_save(cls.cases)
Example #9
def _update_case(domain, case_id, server_modified_on, last_visit_date=None):
    accessors = CaseAccessors(domain)
    case = accessors.get_case(case_id)
    case.server_modified_on = server_modified_on
    if last_visit_date:
        set_case_property_directly(case, 'last_visit_date', last_visit_date.strftime('%Y-%m-%d'))
    if should_use_sql_backend(domain):
        CaseAccessorSQL.save_case(case)
    else:
        # can't call case.save() since it overrides the server_modified_on property
        CommCareCase.get_db().save_doc(case.to_json())
Example #10
def tag_cases_as_deleted_and_remove_indices(domain, docs, deletion_id):
    from corehq.apps.sms.tasks import delete_phone_numbers_for_owners
    from corehq.apps.reminders.tasks import delete_reminders_for_cases
    for doc in docs:
        doc['doc_type'] += DELETED_SUFFIX
        doc['-deletion_id'] = deletion_id
    CommCareCase.get_db().bulk_save(docs)
    case_ids = [doc['_id'] for doc in docs]
    _remove_indices_from_deleted_cases_task.delay(domain, case_ids)
    delete_phone_numbers_for_owners.delay(case_ids)
    delete_reminders_for_cases.delay(domain, case_ids)
Example #11
 def test_simple_delete(self):
     factory = CaseFactory()
     case = factory.create_case()
     [case] = factory.create_or_update_case(CaseStructure(case_id=case._id, attrs={'update': {'foo': 'bar'}}))
     self.assertTrue(CommCareCase.get_db().doc_exist(case._id))
     self.assertEqual(2, len(case.xform_ids))
     for form_id in case.xform_ids:
         self.assertTrue(XFormInstance.get_db().doc_exist(form_id))
     safe_hard_delete(case)
     self.assertFalse(CommCareCase.get_db().doc_exist(case._id))
     for form_id in case.xform_ids:
         self.assertFalse(XFormInstance.get_db().doc_exist(form_id))
Example #12
    def test_delete_sharing_form(self):
        factory = CaseFactory()
        c1, c2 = factory.create_or_update_cases([
            CaseStructure(attrs={'create': True}),
            CaseStructure(attrs={'create': True}),
        ])
        with self.assertRaises(CommCareCaseError):
            safe_hard_delete(c1)

        with self.assertRaises(CommCareCaseError):
            safe_hard_delete(c2)

        self.assertTrue(CommCareCase.get_db().doc_exist(c1._id))
        self.assertTrue(CommCareCase.get_db().doc_exist(c2._id))
Example #13
    def handle(self, *args, **options):
        with open('bihar_case_cleanup.csv', 'wb') as f:
            csv_file = csv.writer(f)
            csv_file.writerow(CaseRow.headers)

            blank_case_ids = get_case_ids_in_domain('care-bihar',
                                                    type=('', None))
            task_case_ids = get_case_ids_in_domain('care-bihar', type='task')

            case_ids = set(blank_case_ids) | set(task_case_ids)
            to_save = []

            logger.info("Total cases to process: {}".format(len(case_ids)))
            for i, doc in enumerate(iter_docs(CommCareCase.get_db(), case_ids)):
                case = CommCareCase.wrap(doc)

                if case.type and case.type != "task":
                    continue
    
                parent = None
                if case.indices:
                    parent_id = case.indices[0].referenced_id
                    try:
                        parent = CommCareCase.get(parent_id)
                    except ResourceNotFound:
                        parent = MissingParent(get_id=parent_id, owner_id='Parent Missing')

                case_row = CaseRow(case, parent)

                if case.type != 'task':
                    if case.user_id == MOTECH_ID:
                        case_row.update_type('task')

                if parent and not isinstance(parent, MissingParent) and parent.owner_id != case.owner_id:
                    case_row.update_owner(parent.owner_id)

                if case_row.save:
                    csv_file.writerow(case_row.to_row())
                    to_save.append(case_row.case)

                if len(to_save) > 100:
                    CommCareCase.get_db().bulk_save(to_save)
                    to_save = []

                if i % 100 == 0:
                    logger.info("{current}/{count} cases completed".format(current=i, count=len(case_ids)))

            if to_save:
                CommCareCase.get_db().bulk_save(to_save)
Example #14
    def testDocTypeCheck(self):
        id = uuid.uuid4().hex
        CommCareCase.get_db().save_doc({
            "_id": id,
            "doc_type": "AintNoCasesHere"
        })
        doc_back = CommCareCase.get_db().get(id)
        self.assertEqual("AintNoCasesHere", doc_back['doc_type'])

        cache = CaseDbCacheCouch()
        try:
            cache.get(id)
            self.fail('doc type security check failed to raise exception')
        except IllegalCaseId:
            pass
Example #15
def filter_cases_modified_elsewhere_since_sync(cases, last_sync):
    # this function is pretty ugly and is heavily optimized to reduce the number
    # of queries to couch.
    if not last_sync:
        return cases
    else:
        case_ids = [case['_id'] for case in cases]
        case_log_map = CommCareCase.get_db().view(
            'phone/cases_to_sync_logs',
            keys=case_ids,
            reduce=False,
        )
        # incoming format is a list of objects that look like this:
        # {
        #   'value': '[log id]',
        #   'key': '[case id]',
        # }
        unique_combinations = set((row['key'], row['value']) for row in case_log_map)
        modification_dates = CommCareCase.get_db().view(
            'phone/case_modification_status',
            keys=[list(combo) for combo in unique_combinations],
            reduce=True,
            group=True,
        )
        # we'll build a structure that looks like this for efficiency:
        # { case_id: [{'token': 'token value', 'date': 'date value'}, ...]}
        all_case_updates_by_sync_token = defaultdict(list)
        for row in modification_dates:
            # incoming format is a list of objects that look like this:
            # {
            #   'value': '2012-08-22T08:55:14Z', (most recent date updated)
            #   'key': ['[case id]', '[sync token id]']
            # }
            if row['value']:
                all_case_updates_by_sync_token[row['key'][0]].append(
                    {'token': row['key'][1], 'date': datetime.strptime(row['value'], '%Y-%m-%dT%H:%M:%SZ')}
                )

        def case_modified_elsewhere_since_sync(case_id):
            # NOTE: uses closures
            return any([row['date'] >= last_sync.date and row['token'] != last_sync._id
                        for row in all_case_updates_by_sync_token[case_id]])

        def relevant(case):
            case_id = case['_id']
            return case_modified_elsewhere_since_sync(case_id) or not last_sync.phone_is_holding_case(case_id)

        return filter(relevant, cases)
Example #16
def get_case_ids_in_domain(domain, type=None):
    if type is None:
        type_keys = [[]]
    elif isinstance(type, (list, tuple)):
        soft_assert('skelly@{}'.format('dimagi.com'))(
            False, 'get_case_ids_in_domain called with tuple / list arg for type'
        )
        type_keys = [[t] for t in type]
    elif isinstance(type, basestring):
        type_keys = [[type]]
    else:
        raise ValueError(
            "Argument type should be a string, tuple, or None: {!r}"
            .format(type)
        )
    return [
        res['id'] for type_key in type_keys
        for res in CommCareCase.get_db().view(
            'case_types_by_domain/view',
            startkey=[domain] + type_key,
            endkey=[domain] + type_key + [{}],
            reduce=False,
            include_docs=False,
        )
    ]
Example #17
def get_number_of_cases_in_domain_by_owner(domain, owner_id):
    res = (
        CommCareCase.get_db()
        .view("cases_by_owner/view", startkey=[domain, owner_id], endkey=[domain, owner_id, {}], reduce=True)
        .one()
    )
    return res["value"] if res else 0
Example #18
    def get(self, case_id):
        if not case_id:
            raise IllegalCaseId('case_id must not be empty')
        if case_id in self.cache:
            return self.cache[case_id]

        try:
            if self.strip_history:
                case_doc = CommCareCase.get_lite(case_id, wrap=self.wrap)
            elif self.lock:
                try:
                    case_doc, lock = CommCareCase.get_locked_obj(_id=case_id)
                except redis.RedisError:
                    case_doc = CommCareCase.get(case_id)
                else:
                    self.locks.append(lock)
            else:
                if self.wrap:
                    case_doc = CommCareCase.get(case_id)
                else:
                    case_doc = CommCareCase.get_db().get(case_id)
        except ResourceNotFound:
            return None

        self.validate_doc(case_doc)
        self.cache[case_id] = case_doc
        return case_doc
Example #19
def get_deleted_case_ids_by_owner(owner_id):
    return [r["id"] for r in CommCareCase.get_db().view(
        'deleted_data/deleted_cases_by_user',
        startkey=[owner_id],
        endkey=[owner_id, {}],
        reduce=False,
    )]
Example #20
 def __init__(self, pillow_checkpoint_id=UCR_CHECKPOINT_ID):
     # todo: this will need to not be hard-coded if we ever split out forms and cases into their own domains
     couch_db = CachedCouchDB(CommCareCase.get_db().uri, readonly=False)
     checkpoint = PillowCheckpoint(pillow_checkpoint_id)
     super(ConfigurableIndicatorPillow, self).__init__(couch_db=couch_db, checkpoint=checkpoint)
     self.bootstrapped = False
     self.last_bootstrapped = datetime.utcnow()
Example #21
    def test_delete_with_related(self):
        factory = CaseFactory()
        parent = factory.create_case()
        [child] = factory.create_or_update_case(
            CaseStructure(attrs={'create': True}, walk_related=False, relationships=[
                CaseRelationship(CaseStructure(case_id=parent._id))
            ]),
        )
        # deleting the parent should not be allowed because the child still references it
        with self.assertRaises(CommCareCaseError):
            safe_hard_delete(parent)

        # deleting the child is ok
        safe_hard_delete(child)
        self.assertTrue(CommCareCase.get_db().doc_exist(parent._id))
        self.assertFalse(CommCareCase.get_db().doc_exist(child._id))
Example #22
def _view_shared(view_name, domain, location_id=None, skip=0, limit=100):
    extras = {"limit": limit} if limit else {}
    startkey = [domain, location_id] if location_id else [domain]
    endkey = copy(startkey) + [{}]
    return CommCareCase.get_db().view(
        view_name, startkey=startkey, endkey=endkey,
        reduce=False, skip=skip, **extras)
Example #23
 def setUp(self):
     # we have to set the fake database before any other calls
     self.orig_db = CommCareCase.get_db()
     self.database = FakeCouchDb()
     CommCareCase.set_db(self.database)
     self.spec = {
         "type": "related_doc",
         "related_doc_type": "CommCareCase",
         "doc_id_expression": {"type": "property_name", "property_name": "parent_id"},
         "value_expression": {"type": "property_name", "property_name": "related_property"},
     }
     self.expression = ExpressionFactory.from_spec(self.spec)
     self.nested_expression = ExpressionFactory.from_spec(
         {
             "type": "related_doc",
             "related_doc_type": "CommCareCase",
             "doc_id_expression": {"type": "property_name", "property_name": "parent_id"},
             "value_expression": {
                 "type": "related_doc",
                 "related_doc_type": "CommCareCase",
                 "doc_id_expression": {"type": "property_name", "property_name": "parent_id"},
                 "value_expression": {"type": "property_name", "property_name": "related_property"},
             },
         }
     )
Example #24
    def cases(self):
        if "debug_case" in self.request.GET:
            case = CommCareCase.get(self.request.GET["debug_case"])
            if case.domain != DOMAIN:
                raise Http404()
            return [case]

        query = (
            case_es.CaseES()
            .domain(self.domain)
            .exclude_source()
            .opened_range(lte=self.datespan.enddate_utc)
            .case_type(self.default_case_type)
        )
        query.index = "report_cases"

        if self.case_status == "open":
            query = query.filter(
                es_filters.OR(case_es.is_closed(False), case_es.closed_range(gte=self.datespan.enddate_utc))
            )
        elif self.case_status == "closed":
            query = query.filter(case_es.closed_range(lte=self.datespan.enddate_utc))

        query = query.owner([user["doc_id"] for user in self.users_matching_filter])

        result = query.run()

        return [CommCareCase.wrap(doc) for doc in iter_docs(CommCareCase.get_db(), result.doc_ids)]
Example #25
def get_call_center_cases(domain_name, case_type, user=None):
    base_key = ["open type owner", domain_name, case_type]
    if user:
        keys = [
            base_key + [owner_id]
            for owner_id in user.get_owner_ids()
        ]
    else:
        keys = [base_key]

    all_cases = []
    for key in keys:
        rows = paginate_view(
            CommCareCase.get_db(),
            'case/all_cases',
            chunk_size=10,
            startkey=key,
            endkey=key + [{}],
            reduce=False,
            include_docs=True
        )
        for row in rows:
            hq_user_id = row['doc'].get('hq_user_id', None)
            if hq_user_id:
                all_cases.append(CallCenterCase(
                    case_id=row['id'],
                    hq_user_id=hq_user_id
                ))

    return all_cases
Example #26
    def handle_one(self, domain, case_type, chunk_size):
        self.log('Copying {case_type} cases in {domain}'
                 .format(case_type=case_type, domain=domain))
        old_db = CommCareCase.get_db()
        new_db = IndicatorCase.get_db()
        assert old_db.uri != new_db.uri
        # this dbaccessor pulls from old_db
        case_ids = get_case_ids_in_domain(domain, case_type)
        self.delete_bad_doc_types(case_ids, chunk_size)
        case_dict_chunks = chunked(iter_docs(old_db, case_ids, chunk_size),
                                   chunk_size)

        for case_dicts in case_dict_chunks:
            for case_dict in case_dicts:
                del case_dict['_rev']
                case_dict.pop('_attachments', None)
                case_dict['doc_type'] = "IndicatorCase"
            try:
                results = new_db.bulk_save(case_dicts)
            except BulkSaveError as error:
                results = error.results
            for result in results:
                if result.get('error') == 'conflict':
                    self.log('- OK: [{id}] is already in the indicator db'
                             .format(id=result.get('id')))
                elif 'error' in result:
                    self.log('- ERROR: [{id}] ({result})'.format(
                        id=result.get('id'),
                        result=json.dumps(result)
                    ))
                else:
                    self.log('- ADDED: [{id}] saved to indicator db'.format(
                        id=result.get('id')
                    ))
Example #27
    def handle(self, *args, **options):
        domain, group_name = args
        group = Group.by_name(domain, name=group_name)
        owner_ids = get_all_owner_ids_from_group(group)
        pillow = CareBiharFluffPillow()
        db = CommCareCase.get_db()

        greenlets = []

        def process_case(case):
            pillow.change_transport(pillow.change_transform(case))


        for i, owner_id in enumerate(owner_ids):
            print '{0}/{1} owner_ids'.format(i, len(owner_ids))
            rows = CommCareCase.view(
                'hqcase/by_owner',
                startkey=[domain, owner_id],
                endkey=[domain, owner_id, {}],
                reduce=False,
            ).all()
            case_ids = [row['id'] for row in rows]
            print '{0} case_ids'.format(len(case_ids))
            for case in iter_docs(db, case_ids):
                g = gevent.Greenlet.spawn(process_case, case)
                greenlets.append(g)
        gevent.joinall(greenlets)
Example #28
 def get_cases(case_ids, ordered=False):
     return [
         CommCareCase.wrap(doc) for doc in iter_docs(
             CommCareCase.get_db(),
             case_ids
         )
     ]
Example #29
 def __init__(self):
     # run_ptop never passes args to __init__ so make that explicit by not supporting any
     # todo: this will need to not be hard-coded if we ever split out forms and cases into their own domains
     couch_db = CachedCouchDB(CommCareCase.get_db().uri, readonly=False)
     super(ConfigurableIndicatorPillow, self).__init__(couch_db=couch_db)
     self.bootstrapped = False
     self.last_bootstrapped = datetime.utcnow()
Example #30
    def delete_all_cases(cls, domain=None):
        assert CommCareCase.get_db().dbname.startswith('test_')
        view_kwargs = {}
        if domain:
            view_kwargs = {
                'startkey': [domain],
                'endkey': [domain, {}],
            }

        cls._delete_all(
            CommCareCase.get_db(),
            'cases_by_server_date/by_server_modified_on',
            **view_kwargs
        )

        FormProcessorTestUtils.delete_all_sql_cases(domain)
Example #31
def get_indexed_case_ids(domain, case_ids):
    """
    Given a base list of case ids, gets all ids of cases they reference (parent and host cases)
    """
    from casexml.apps.case.models import CommCareCase
    keys = [[domain, case_id, 'index'] for case_id in case_ids]
    return [r['value']['referenced_id'] for r in CommCareCase.get_db().view(
        'case_indices/related',
        keys=keys,
        reduce=False,
    )]
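A minimal usage sketch for the accessor above; the domain, the child case ids, and the follow-up iter_docs fetch are illustrative assumptions, not taken from the source.
# Hypothetical illustration: 'my-domain' and the child case ids are made up.
child_ids = ['case-id-1', 'case-id-2']
parent_and_host_ids = get_indexed_case_ids('my-domain', child_ids)
# One plausible follow-up: pull the full case footprint in a single pass.
all_ids = set(child_ids) | set(parent_and_host_ids)
all_docs = list(iter_docs(CommCareCase.get_db(), list(all_ids)))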
Example #32
 def save_processed_models(cls,
                           processed_forms,
                           cases=None,
                           stock_result=None):
     docs = list(processed_forms) + (cases or [])
     docs = filter(None, docs)
     assert XFormInstance.get_db().uri == CommCareCase.get_db().uri
     with bulk_atomic_blobs(docs):
         XFormInstance.get_db().bulk_save(docs)
     if stock_result:
         stock_result.commit()
Example #33
 def cases(self):
     _assert = soft_assert('@'.join(['droberts', 'dimagi.com']))
     _assert(False, "I'm surprised GroupReferenceMixIn ever gets called!")
     case_ids = get_case_ids_in_domain_by_owner(
         self.domain, owner_id__in=self.all_owner_ids, closed=False)
     # really inefficient, but can't find where it's called
     # and this is what it was doing before
     return [
         CommCareCase.wrap(doc)
         for doc in iter_docs(CommCareCase.get_db(), case_ids)
     ]
Example #34
def get_couch_domain_case_change_provider(domain):
    return CouchViewChangeProvider(
        couch_db=CommCareCase.get_db(),
        view_name='cases_by_owner/view',
        chunk_size=100,
        view_kwargs={
            'include_docs': True,
            'startkey': [domain],
            'endkey': [domain, {}, {}]
        }
    )
Example #35
 def get_all(self, domain):
     key = [domain, self.case_type or {}, {}]
     view_name = 'hqcase/open_cases' if self.status == CASE_STATUS_OPEN else 'hqcase/all_cases'
     view_results = CommCareCase.get_db().view(
         view_name,
         startkey=key,
         endkey=key + [{}],
         include_docs=False,
         reduce=False,
     )
     ids = [res["id"] for res in view_results]
     return self._case_results(ids)
Example #36
def _get_case_ids(domain, owner_id, is_closed):
    from casexml.apps.case.models import CommCareCase
    if is_closed is None:
        key = [domain, owner_id]
    else:
        key = [domain, owner_id, is_closed]

    return [row['id'] for row in CommCareCase.get_db().view(
        'hqcase/by_owner',
        reduce=False,
        key=key,
    )]
Example #37
def get_number_of_cases_in_domain_of_type(domain, case_type):
    warnings.warn(
        'get_number_of_cases_in_domain_of_type works off couch '
        'and thus is not suitable for use on SQL domains', DeprecationWarning)
    type_key = [case_type] if case_type else []
    row = CommCareCase.get_db().view(
        "case_types_by_domain/view",
        startkey=[domain] + type_key,
        endkey=[domain] + type_key + [{}],
        reduce=True,
    ).one()
    return row["value"] if row else 0
Example #38
def get_last_modified_dates(domain, case_ids):
    """
    Given a list of case IDs, return a dict where the ids are keys and the
    values are the last server modified date of that case.
    """
    keys = [[domain, case_id] for case_id in case_ids]
    return dict([(row['id'], iso_string_to_datetime(row['value']))
                 for row in CommCareCase.get_db().view(
                     'cases_by_server_date/by_server_modified_on',
                     keys=keys,
                     include_docs=False,
                     reduce=False)])
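A hedged sketch of consuming the returned mapping; the cutoff and the source of case_ids are assumptions for illustration.
from datetime import datetime, timedelta

# Hypothetical usage: flag cases whose server-side modification is older than a cutoff.
cutoff = datetime.utcnow() - timedelta(days=30)
modified_by_id = get_last_modified_dates('my-domain', case_ids)  # case_ids assumed to exist
stale_ids = [case_id for case_id, modified_on in modified_by_id.items()
             if modified_on < cutoff]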
Example #39
    def get_schedule(cls, chw_username, override_date=None):
        """
        Generate schedule object for a given username
        """
        cached_schedules = None

        if override_date is None:
            nowdate = datetime.utcnow()
        else:
            nowdate = override_date

        day_intervaltree = {}
        if cached_schedules is None:
            # no documents, then we need to load them up
            db = CommCareCase.get_db()
            chw_schedules = db.view('pact/chw_dot_schedules',
                                    key=chw_username).all()
            to_cache = []
            for item in chw_schedules:
                single_sched = item['value']
                to_cache.append(single_sched)
            cache.set("%s_schedule" % (chw_username), json.dumps(to_cache),
                      3600)
            cached_arr = to_cache
        else:
            cached_arr = json.loads(cached_schedules)

        for single_sched in cached_arr:
            day_of_week = int(single_sched['day_of_week'])
            if day_of_week in day_intervaltree:
                daytree = day_intervaltree[day_of_week]
            else:
                #if there's no day of week indication for this, then it's just a null interval node.  To start this node, we make it REALLY old.
                daytree = IntervalNode(
                    get_seconds(datetime.min),
                    get_seconds(nowdate + timedelta(days=10)))
            if single_sched['ended_date'] is None:
                enddate = nowdate + timedelta(days=9)
            else:
                enddate = iso_string_to_datetime(single_sched['ended_date'])

            startdate = iso_string_to_datetime(single_sched['active_date'])
            case_id = single_sched['case_id']
            if 'error' in single_sched:
                #this is a non-showstopping issue due to quirks with older submissions
                logging.error("Error, no pactid: %s" % single_sched['error'])

            daytree.insert(get_seconds(startdate),
                           get_seconds(enddate),
                           other=case_id)
            day_intervaltree[day_of_week] = daytree
        return cls(chw_username, day_intervaltree, cached_arr)
Example #40
def doc_to_change(doc):
    return Change(id=doc['_id'],
                  sequence_id='0',
                  document=doc,
                  metadata=ChangeMeta(
                      document_id=doc['_id'],
                      data_source_type=data_sources.COUCH,
                      data_source_name=CommCareCase.get_db().dbname,
                      document_type=doc['doc_type'],
                      document_subtype=doc.get('type'),
                      domain=doc['domain'],
                      is_deletion=False,
                  ))
Example #41
def get_all_case_owner_ids(domain):
    """
    Get all owner ids that are assigned to cases in a domain.
    """
    from casexml.apps.case.models import CommCareCase
    key = [domain]
    submitted = CommCareCase.get_db().view(
        'cases_by_owner/view',
        group_level=2,
        startkey=key,
        endkey=key + [{}],
    ).all()
    return set([row['key'][1] for row in submitted])
Example #42
    def test_series(self):
        self.assertEqual(CommCareCase.get_db(), self.normal_db)
        self.assertEqual(CommCareCase.get_db, self.normal_get_db)

        with OverrideDB(CommCareCase, self.other_db_1):
            self.assertEqual(CommCareCase.get_db(), self.other_db_1)
            self.assertNotEqual(CommCareCase.get_db(), self.normal_db)
            self.assertNotEqual(CommCareCase.get_db(), self.normal_get_db)

        self.assertEqual(CommCareCase.get_db(), self.normal_db)
        self.assertEqual(CommCareCase.get_db, self.normal_get_db)

        with OverrideDB(CommCareCase, self.other_db_2):
            self.assertEqual(CommCareCase.get_db(), self.other_db_2)
            self.assertNotEqual(CommCareCase.get_db(), self.normal_db)
            self.assertNotEqual(CommCareCase.get_db(), self.normal_get_db)

        self.assertEqual(CommCareCase.get_db(), self.normal_db)
        self.assertEqual(CommCareCase.get_db, self.normal_get_db)
Example #43
def get_case_types_for_domain(domain):
    key = [domain]
    rows = CommCareCase.get_db().view(
        'hqcase/types_by_domain',
        startkey=key,
        endkey=key + [{}],
        group_level=2,
    ).all()
    case_types = []
    for row in rows:
        _, case_type = row['key']
        if case_type:
            case_types.append(case_type)
    return case_types
Example #44
def get_case_ids_modified_with_owner_since(domain, owner_id, reference_date):
    """
    Gets all cases with a specified owner ID that have been modified
    since a particular reference_date (using the server's timestamp)
    """
    return [
        row['id'] for row in CommCareCase.get_db().view(
            'cases_by_server_date/by_owner_server_modified_on',
            startkey=[domain, owner_id, json_format_datetime(reference_date)],
            endkey=[domain, owner_id, {}],
            include_docs=False,
            reduce=False
        )
    ]
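A brief hedged example of calling the accessor above; the domain, owner id, and one-week window are placeholders.
from datetime import datetime, timedelta

# Hypothetical call: cases this owner touched in the last seven days.
reference_date = datetime.utcnow() - timedelta(days=7)
recent_ids = get_case_ids_modified_with_owner_since('my-domain', 'owner-id', reference_date)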
Example #45
class TestEnsureDocumentExists(TestCase):
    domain = 'ensure-domain'

    def _create_change(self):
        case = CommCareCase(domain=self.domain)
        case.save()

        change = Change(case._id,
                        'seq',
                        document_store=CouchDocumentStore(
                            CommCareCase.get_db(),
                            self.domain,
                        ))
        return change, case
Example #46
def check_reportcase_es_index(doc_id=None, interval=10):
    do_check = False
    for domain in settings.ES_CASE_FULL_INDEX_DOMAINS:
        domain_doc = Domain.get_by_name(domain)
        if domain_doc is not None:
            do_check = True
            break

    if do_check:
        db = CommCareCase.get_db()
        es_index = REPORT_CASE_INDEX
        check_doc_id = doc_id if doc_id else _get_latest_doc_from_index(es_index, sort_field='opened_on')
        return check_index_by_doc(es_index, db, check_doc_id, interval=interval)
    else:
        return {}
Example #47
    def get_pact_cases(cls):
        # query couch to get reduce count of all PACT cases
        case_es = ReportCaseES(PACT_DOMAIN)
        total_count = CommCareCase.get_db().view('hqcase/types_by_domain',
                                                 key=["pact", PACT_CASE_TYPE]).first().get('value', 100)
        fields = ['_id', 'name', 'pactid.#value']
        query = case_es.base_query(terms={'type': PACT_CASE_TYPE},
                                   fields=fields,
                                   start=0,
                                   size=total_count)
        query['filter']['and'].append({"prefix": {"dot_status.#value": "dot"}})

        results = case_es.run_query(query)
        for res in results['hits']['hits']:
            yield res['fields']
Example #48
 def save_processed_models(cls,
                           processed_forms,
                           cases=None,
                           stock_result=None):
     docs = list(processed_forms)
     for form in docs:
         if form:
             form.server_modified_on = datetime.datetime.utcnow()
     docs += (cases or [])
     docs = [_f for _f in docs if _f]
     assert XFormInstance.get_db().uri == CommCareCase.get_db().uri
     with bulk_atomic_blobs(docs):
         XFormInstance.get_db().bulk_save(docs)
     if stock_result:
         stock_result.commit()
Example #49
def get_n_case_ids_in_domain_by_owner(domain, owner_id, n,
                                      start_after_case_id=None):
    view_kwargs = {}
    if start_after_case_id:
        view_kwargs['startkey_docid'] = start_after_case_id
        view_kwargs['skip'] = 1

    return [row['id'] for row in CommCareCase.get_db().view(
        "hqcase/by_owner",
        reduce=False,
        startkey=[domain, owner_id, False],
        endkey=[domain, owner_id, False],
        limit=n,
        **view_kwargs
    )]
Example #50
 def _get_couch_case_data(run_config):
     for couch_domain in _get_matching_couch_domains(run_config):
         iterator = paginate_view(
             CommCareCase.get_db(),
             'cases_by_server_date/by_server_modified_on',
             chunk_size=1000,
             startkey=[couch_domain],
             endkey=[couch_domain, {}],
             include_docs=False,
             reduce=False,
         )
         for row in iterator:
             case_id, modified_on = row['id'], iso_string_to_datetime(
                 row['value'])
             if run_config.start_date <= modified_on < run_config.end_date:
                 yield case_id, 'COUCH_TYPE_NOT_SUPPORTED', modified_on, couch_domain
Example #51
    def get_payload(self):
        response = self.restore_state.restore_class()
        case_ids_to_sync = set()
        for owner_id in self.restore_state.owner_ids:
            case_ids_to_sync = case_ids_to_sync | set(self.get_case_ids_for_owner(owner_id))

        if (not self.restore_state.is_initial and
                any([not self.is_clean(owner_id) for owner_id in self.restore_state.owner_ids])):
            # if it's a steady state sync and we have any dirty owners, then we also need to
            # include ALL cases on the phone that have been modified since the last sync as
            # possible candidates to sync (since they may have been closed or reassigned by someone else)

            # don't bother checking ones we've already decided to check
            other_ids_to_check = self.restore_state.last_sync_log.case_ids_on_phone - case_ids_to_sync
            case_ids_to_sync = case_ids_to_sync | set(filter_cases_modified_since(
                self.restore_state.domain, list(other_ids_to_check), self.restore_state.last_sync_log.date
            ))

        all_maybe_syncing = copy(case_ids_to_sync)
        all_synced = set()
        all_indices = defaultdict(set)
        all_dependencies_syncing = set()
        while case_ids_to_sync:
            ids = pop_ids(case_ids_to_sync, chunk_size)
            # todo: see if we can avoid wrapping - serialization depends on it heavily for now
            case_batch = filter(
                partial(case_needs_to_sync, last_sync_log=self.restore_state.last_sync_log),
                [CommCareCase.wrap(doc) for doc in get_docs(CommCareCase.get_db(), ids)]
            )
            updates = get_case_sync_updates(
                self.restore_state.domain, case_batch, self.restore_state.last_sync_log
            )
            for update in updates:
                case = update.case
                all_synced.add(case._id)
                append_update_to_response(response, update, self.restore_state)

                # update the indices in the new sync log
                if case.indices:
                    all_indices[case._id] = {index.identifier: index.referenced_id for index in case.indices}
                    # and double check footprint for non-live cases
                    for index in case.indices:
                        if index.referenced_id not in all_maybe_syncing:
                            case_ids_to_sync.add(index.referenced_id)

                if not _is_live(case, self.restore_state):
                    all_dependencies_syncing.add(case._id)
Example #52
def handle_problematic_data(datalist_tup, csv_writer, verbose=False, rebuild=False):
    case_data = CommCareCase.get_db().view('_all_docs', keys=[d[1] for d in datalist_tup])
    cases = set([c["id"] for c in case_data if 'id' in c])
    for domain, case_id, form_id, received_on in datalist_tup:
        error = "action_missing" if case_id in cases else "nonexistent_case"
        csv_writer.writerow([domain, case_id, form_id, received_on, error])
        if verbose and error == "nonexistent_case":
            logger.info("Case (%s) from form (%s) does not exist" % (case_id, form_id))
        elif verbose and error == "action_missing":
            logger.info("Case (%s) missing action for form (%s)" % (case_id, form_id))
        if rebuild:
            if verbose:
                logger.info("rebuilding case (%s) from scratch" % case_id)
            try:
                rebuild_case(case_id)
            except Exception as e:
                logger.info("Case Rebuild Failure: %s" % e)
Example #53
def get_all_reverse_indices_info(domain, case_ids, relationship=None):
    from casexml.apps.case.models import CommCareCase

    def _row_to_index_info(row):
        return CaseIndexInfo(
            case_id=row['id'],
            identifier=row['value']['identifier'],
            referenced_id=row['key'][1],
            referenced_type=row['value']['referenced_type'],
            relationship=row['value']['relationship']
        )

    return list(map(_row_to_index_info, CommCareCase.get_db().view(
        'case_indices/related',
        keys=_get_keys_for_reverse_index_view(domain, case_ids, relationship),
        reduce=False,
    )))
Example #54
    def _get_couch_case_data(run_config):
        view_name = 'cases_by_server_date/by_server_modified_on'

        keys = [[couch_domain] for couch_domain in _get_matching_couch_domains(run_config)]
        if not keys:
            return

        iteration_key = f'couch_cases-{run_config.iteration_key}'
        event_handler = ProgressEventHandler(iteration_key, 'unknown', sys.stderr)
        iterable = resumable_view_iterator(
            CommCareCase.get_db(), iteration_key, view_name, keys,
            chunk_size=CHUNK_SIZE, view_event_handler=event_handler, full_row=True
        )
        for row in iterable:
            case_id, domain, modified_on = row['id'], row['key'][0], iso_string_to_datetime(row['value'])
            if run_config.start_date <= modified_on < run_config.end_date:
                yield case_id, 'COUCH_TYPE_NOT_SUPPORTED', modified_on, domain
Example #55
def db_comparisons(request):
    comparison_config = [
        {
            'description': 'Users (base_doc is "CouchUser")',
            'couch_db': CommCareUser.get_db(),
            'view_name': 'users/by_username',
            'es_query': UserES().remove_default_filter('active').size(0),
            'sql_rows': User.objects.count(),
        },
        {
            'description': 'Domains (doc_type is "Domain")',
            'couch_db': Domain.get_db(),
            'view_name': 'domain/by_status',
            'es_query': DomainES().size(0),
            'sql_rows': None,
        },
        {
            'description': 'Forms (doc_type is "XFormInstance")',
            'couch_db': XFormInstance.get_db(),
            'view_name': 'couchforms/by_xmlns',
            'es_query': FormES().remove_default_filter('has_xmlns')
                .remove_default_filter('has_user')
                .size(0),
            'sql_rows': FormData.objects.count(),
        },
        {
            'description': 'Cases (doc_type is "CommCareCase")',
            'couch_db': CommCareCase.get_db(),
            'view_name': 'case/by_owner',
            'es_query': CaseES().size(0),
            'sql_rows': None,
        }
    ]

    comparisons = []
    for comp in comparison_config:
        comparisons.append({
            'description': comp['description'],
            'couch_docs': comp['couch_db'].view(
                comp['view_name'],
                reduce=True,
            ).one()['value'],
            'es_docs': comp['es_query'].run().total,
            'sql_rows': comp['sql_rows'] if comp['sql_rows'] else 'n/a',
        })
    return json_response(comparisons)
Example #56
def cases_referenced_by_xform(xform):
    """
    JSON repr of XFormInstance -> [CommCareCase]
    """
    case_ids = get_case_ids_from_form(xform)

    cases = [
        CommCareCase.wrap(doc)
        for doc in iter_docs(CommCareCase.get_db(), case_ids)
    ]

    domain = get_and_check_xform_domain(xform)
    if domain:
        for case in cases:
            assert case.domain == domain

    return cases
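A minimal caller sketch, assuming (per the docstring) that the helper takes the raw JSON of a stored form; the form id is a placeholder.
# Hypothetical usage; 'some-form-id' is made up for illustration.
xform_json = XFormInstance.get_db().get('some-form-id')
touched_case_ids = [case._id for case in cases_referenced_by_xform(xform_json)]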
Example #57
def get_case_types_for_domain(domain):
    """
    :return: set of case types
    """
    key = [domain]
    rows = CommCareCase.get_db().view(
        'case_types_by_domain/view',
        startkey=key,
        endkey=key + [{}],
        group_level=2,
    ).all()
    case_types = set()
    for row in rows:
        _, case_type = row['key']
        if case_type:
            case_types.add(case_type)
    return case_types
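A short hedged example of the membership check this set supports; the 'pregnancy' case type is invented.
# Hypothetical usage; 'pregnancy' is a made-up case type.
domain_case_types = get_case_types_for_domain('my-domain')
has_pregnancy_cases = 'pregnancy' in domain_case_types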
Example #58
 def report_context(self):
     context = super(CaseExportReport, self).report_context
     cases = CommCareCase.get_db().view("hqcase/types_by_domain",
         startkey=[self.domain],
         endkey=[self.domain, {}],
         reduce=True,
         group=True,
         group_level=2).all()
     groups = HQGroupExportConfiguration.by_domain(self.domain)
     context.update(
         case_types=[case['key'][1] for case in cases],
         group_exports=[group.case_exports for group in groups
             if group.case_exports],
         report_slug=self.slug,
     )
     context['case_format'] = self.request.GET.get('case_format') or 'csv'
     return context
Example #59
def scan_case(scanner_serial, scan_id):
    """
    Find the appropriate case for a serial/exam id combo.

    Throws an exception if there are more than one (this is
    an error that we do not expect to be able to make corrections
    for).
    """

    # this is shown on device and stored on the case with no leading zeroes
    # but has them on the file itself
    scan_id = scan_id.lstrip('0')

    return CommCareCase.get_db().view(
        'uth/uth_lookup',
        startkey=[UTH_DOMAIN, scanner_serial, scan_id],
        endkey=[UTH_DOMAIN, scanner_serial, scan_id, {}],
    ).one()
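A hedged caller sketch; the serial number and scan id are placeholders, and the 'id' key assumes the standard couch view row shape seen elsewhere in these examples.
# Hypothetical usage; leading zeroes on the scan id are stripped inside scan_case.
row = scan_case('SERIAL-123', '000042')
matched_case_id = row['id'] if row else None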
Example #60
    def __init__(self, case_id, attachments):
        _case_id = case_id

        self.old_db = CommCareCase.get_db()

        @classmethod
        def fetch_case_attachment(cls,
                                  case_id,
                                  attachment_key,
                                  fixed_size=None,
                                  **kwargs):
            if case_id == _case_id and attachment_key in attachments:
                return None, open(attachments[attachment_key])
            else:
                raise ResourceNotFound()

        self.old_fetch_case_attachment = CommCareCase.fetch_case_attachment
        self.fetch_case_attachment = fetch_case_attachment