def test_get_case_ids_in_domain_by_owner(self):
    """get_case_ids_in_domain_by_owner honors owner_id, owner_id__in and closed."""
    owned = [
        case for case in self.cases
        if case.domain == self.domain and case.user_id == 'XXX'
    ]
    every_owned_id = {case.get_id for case in owned}
    # single owner id
    self.assertEqual(
        set(get_case_ids_in_domain_by_owner(self.domain, owner_id='XXX')),
        every_owned_id,
    )
    # list-of-owners variant
    self.assertEqual(
        set(get_case_ids_in_domain_by_owner(self.domain, owner_id__in=['XXX'])),
        every_owned_id,
    )
    # restricted to open cases only
    self.assertEqual(
        set(get_case_ids_in_domain_by_owner(
            self.domain, owner_id='XXX', closed=False)),
        {case.get_id for case in owned if case.closed is False},
    )
    # restricted to closed cases only
    self.assertEqual(
        set(get_case_ids_in_domain_by_owner(
            self.domain, owner_id='XXX', closed=True)),
        {case.get_id for case in owned if case.closed is True},
    )
def test_get_case_ids_in_domain_by_owner(self):
    """Check the owner_id, owner_id__in and closed filters of the helper."""
    matching_cases = [
        case for case in self.cases
        if case.domain == self.domain and case.user_id == 'XXX'
    ]
    all_ids = {case.get_id for case in matching_cases}
    open_ids = {case.get_id for case in matching_cases if case.closed is False}
    closed_ids = {case.get_id for case in matching_cases if case.closed is True}

    # exact owner id vs. membership-list spelling should agree
    self.assertEqual(
        set(get_case_ids_in_domain_by_owner(self.domain, owner_id='XXX')),
        all_ids,
    )
    self.assertEqual(
        set(get_case_ids_in_domain_by_owner(self.domain, owner_id__in=['XXX'])),
        all_ids,
    )
    # closed flag narrows to open / closed subsets respectively
    self.assertEqual(
        set(get_case_ids_in_domain_by_owner(
            self.domain, owner_id='XXX', closed=False)),
        open_ids,
    )
    self.assertEqual(
        set(get_case_ids_in_domain_by_owner(
            self.domain, owner_id='XXX', closed=True)),
        closed_ids,
    )
def cases(self):
    """Return wrapped open CommCareCase docs owned by any id in all_owner_ids."""
    # soft-assert so the maintainer hears about unexpected usage
    _assert = soft_assert('@'.join(['droberts', 'dimagi.com']))
    _assert(False, "I'm surprised GroupReferenceMixIn ever gets called!")
    open_case_ids = get_case_ids_in_domain_by_owner(
        self.domain, owner_id__in=self.all_owner_ids, closed=False)
    # really inefficient, but can't find where it's called
    # and this is what it was doing before
    docs = iter_docs(CommCareCase.get_db(), open_case_ids)
    return [CommCareCase.wrap(doc) for doc in docs]
def cases(self):
    """Load every open case owned by all_owner_ids as CommCareCase objects."""
    # alert the maintainer that this code path was actually exercised
    _assert = soft_assert('@'.join(['droberts', 'dimagi.com']))
    _assert(False, "I'm surprised GroupReferenceMixIn ever gets called!")
    ids = get_case_ids_in_domain_by_owner(
        self.domain,
        owner_id__in=self.all_owner_ids,
        closed=False,
    )
    # really inefficient, but can't find where it's called
    # and this is what it was doing before
    return [
        CommCareCase.wrap(doc)
        for doc in iter_docs(CommCareCase.get_db(), ids)
    ]
def handle(self, *args, **options):
    # Management-command entry point for copy_group_data.
    # NOTE(review): this snippet is truncated in the source — the body of the
    # final `for case in cases:` loop is missing here.
    # Deliberately disabled: everything below the raise is unreachable.
    raise CommandError(
        'copy_group_data is currently broken. '
        'Ask Danny or Ethan to fix it along the lines of '
        'https://github.com/dimagi/commcare-hq/pull/9180/files#diff-9d976dc051a36a028c6604581dfbce5dR95'
    )
    if len(args) != 2:
        raise CommandError('Usage is copy_group_data %s' % self.args)
    sourcedb = Database(args[0])
    group_id = args[1]
    exclude_user_owned = options["exclude_user_owned"]
    print 'getting group'
    group = Group.wrap(sourcedb.get(group_id))
    group.save(force_update=True)
    print 'getting domain'
    # fetch the domain doc matching the group's domain from the source db
    domain = Domain.wrap(
        sourcedb.view('domain/domains', key=group.domain, include_docs=True,
                      reduce=False, limit=1).one()['doc'])
    dt = DocumentTransform(domain._obj, sourcedb)
    save(dt, Domain.get_db())
    # cases owned by the group itself, plus its users unless excluded
    owners = [group_id]
    if not exclude_user_owned:
        owners.extend(group.users)
    print 'getting case ids'
    with OverrideDB(CommCareCase, sourcedb):
        case_ids = get_case_ids_in_domain_by_owner(domain.name,
                                                   owner_id__in=owners)
    xform_ids = set()
    print 'copying %s cases' % len(case_ids)
    # copy cases over in CHUNK_SIZE batches
    for i, subset in enumerate(chunked(case_ids, CHUNK_SIZE)):
        print i * CHUNK_SIZE
        cases = [
            CommCareCase.wrap(case['doc']) for case in sourcedb.all_docs(
                keys=list(subset),
                include_docs=True,
            )
        ]
        for case in cases:
def _move_no_longer_owned_cases_to_dependent_list_if_necessary(restore_state):
    """On a non-initial sync, demote cases whose owners left the phone's owner set."""
    if restore_state.is_initial:
        return
    removed_owners = (
        set(restore_state.last_sync_log.owner_ids_on_phone)
        - set(restore_state.owner_ids)
    )
    if not removed_owners:
        return
    # if we removed any owner ids, then any cases that belonged to those
    # owners need to be moved to the dependent list
    candidate_ids = get_case_ids_in_domain_by_owner(
        domain=restore_state.domain,
        owner_id__in=list(removed_owners),
    )
    sync_log = restore_state.current_sync_log
    for case_id in candidate_ids:
        if case_id in sync_log.case_ids_on_phone:
            sync_log.dependent_case_ids_on_phone.add(case_id)
def handle(self, *args, **options):
    # Management-command entry point for copy_group_data (active variant —
    # no disabling raise at the top, unlike the sibling snippet).
    # NOTE(review): this snippet is truncated in the source — the body of the
    # final `for case in cases:` loop is missing here.
    if len(args) != 2:
        raise CommandError('Usage is copy_group_data %s' % self.args)
    sourcedb = Database(args[0])
    group_id = args[1]
    exclude_user_owned = options["exclude_user_owned"]
    print 'getting group'
    group = Group.wrap(sourcedb.get(group_id))
    group.save(force_update=True)
    print 'getting domain'
    # fetch the domain doc matching the group's domain from the source db
    domain = Domain.wrap(
        sourcedb.view('domain/domains', key=group.domain, include_docs=True,
                      reduce=False, limit=1).one()['doc']
    )
    dt = DocumentTransform(domain._obj, sourcedb)
    save(dt, Domain.get_db())
    # cases owned by the group itself, plus its users unless excluded
    owners = [group_id]
    if not exclude_user_owned:
        owners.extend(group.users)
    print 'getting case ids'
    with OverrideDB(CommCareCase, sourcedb):
        case_ids = get_case_ids_in_domain_by_owner(
            domain.name, owner_id__in=owners)
    xform_ids = set()
    print 'copying %s cases' % len(case_ids)
    # copy cases over in CHUNK_SIZE batches
    for i, subset in enumerate(chunked(case_ids, CHUNK_SIZE)):
        print i * CHUNK_SIZE
        cases = [CommCareCase.wrap(case['doc'])
                 for case in sourcedb.all_docs(
                     keys=list(subset),
                     include_docs=True,
                 )]
        for case in cases:
def get_owned(self, user_id):
    """Return case results for everything owned by user_id (expanded to owner ids)."""
    try:
        user = CouchUser.get_by_user_id(user_id, self.domain)
    except KeyError:
        user = None
    # fall back to the raw id when there is no couch user to expand
    try:
        owner_ids = user.get_owner_ids()
    except AttributeError:
        owner_ids = [user_id]
    # map the requested case status onto the closed filter flag
    status_to_closed_flag = {
        CASE_STATUS_OPEN: False,
        CASE_STATUS_CLOSED: True,
        CASE_STATUS_ALL: None,
    }
    matching_ids = get_case_ids_in_domain_by_owner(
        self.domain,
        owner_id__in=owner_ids,
        closed=status_to_closed_flag[self.status],
    )
    return self._case_results(matching_ids)
def handle(self, *args, **options):
    """Re-feed every case owned by the named group through the CareBihar pillow."""
    domain, group_name = args
    group = Group.by_name(domain, name=group_name)
    owner_ids = get_all_owner_ids_from_group(group)
    pillow = CareBiharFluffPillow()
    case_db = CommCareCase.get_db()
    spawned = []

    def process_case(case):
        # transform then transport, exactly as the pillow would on a change
        pillow.change_transport(pillow.change_transform(case))

    for idx, owner_id in enumerate(owner_ids):
        print('{0}/{1} owner_ids'.format(idx, len(owner_ids)))
        case_ids = get_case_ids_in_domain_by_owner(domain, owner_id=owner_id)
        print('{0} case_ids'.format(len(case_ids)))
        for case in iter_docs(case_db, case_ids):
            spawned.append(gevent.Greenlet.spawn(process_case, case))
    gevent.joinall(spawned)
def get_owned(self, user_id):
    """Look up cases owned by user_id, honoring the configured status filter."""
    try:
        user = CouchUser.get_by_user_id(user_id, self.domain)
    except KeyError:
        user = None
    # a missing user (None) raises AttributeError -> use the bare id
    try:
        owner_ids = user.get_owner_ids()
    except AttributeError:
        owner_ids = [user_id]
    closed_flag = {
        CASE_STATUS_OPEN: False,
        CASE_STATUS_CLOSED: True,
        CASE_STATUS_ALL: None,
    }[self.status]
    case_ids = get_case_ids_in_domain_by_owner(
        self.domain, owner_id__in=owner_ids, closed=closed_flag)
    return self._case_results(case_ids)
def handle(self, *args, **options):
    """Reprocess all cases owned by a group through CareBiharFluffPillow."""
    domain, group_name = args
    group = Group.by_name(domain, name=group_name)
    owner_ids = get_all_owner_ids_from_group(group)
    pillow = CareBiharFluffPillow()
    db = CommCareCase.get_db()
    workers = []

    def process_case(case):
        # run the pillow's transform/transport pair on a single case doc
        pillow.change_transport(pillow.change_transform(case))

    total_owners = len(owner_ids)
    for i, owner_id in enumerate(owner_ids):
        print('{0}/{1} owner_ids'.format(i, total_owners))
        case_ids = get_case_ids_in_domain_by_owner(domain, owner_id=owner_id)
        print('{0} case_ids'.format(len(case_ids)))
        for case in iter_docs(db, case_ids):
            workers.append(gevent.Greenlet.spawn(process_case, case))
    gevent.joinall(workers)
def get_case_ids_in_domain_by_owners(domain, owner_ids, closed=None):
    """Plural convenience wrapper: case ids for several owners at once."""
    return get_case_ids_in_domain_by_owner(
        domain, owner_id__in=owner_ids, closed=closed)
def explode_cases(user_id, domain, factor, task=None):
    """Duplicate every open case owned by the user ``factor`` times in total.

    Parent cases (no indices) are copied first; child cases are deferred and
    re-queued until every parent they reference has been copied, so each new
    child copy can be linked to the matching new parent copy.

    :param user_id: id of the CommCareUser whose cases are exploded
    :param domain: domain name the cases live in
    :param factor: desired total count per case — ``factor - 1`` copies are
        submitted for each original
    :param task: optional task handle used for progress reporting
    :returns: dict with a ``'messages'`` list describing the result
    :raises Exception: if child cases reference parents that never appear
        (inconsistent references)
    """
    user = CommCareUser.get_by_user_id(user_id, domain)
    messages = []
    if task:
        DownloadBase.set_progress(explode_case_task, 0, 0)
    count = 0

    old_to_new = {}  # original case id -> list of new copy ids
    child_cases = []
    case_ids = get_case_ids_in_domain_by_owner(
        domain, owner_id__in=user.get_owner_ids(), closed=False)
    cases = (CommCareCase.wrap(doc)
             for doc in iter_docs(CommCareCase.get_db(), case_ids))

    # copy parents
    for case in cases:
        # save children for later
        if case.indices:
            child_cases.append(case)
            continue
        old_to_new[case._id] = []
        for i in range(factor - 1):
            new_case_id = uuid.uuid4().hex
            # add new parent ids to the old to new id mapping
            old_to_new[case._id].append(new_case_id)
            submit_case(case, new_case_id, domain)
            count += 1
            if task:
                DownloadBase.set_progress(explode_case_task, count, 0)

    # each pass resolves at least one child when references are consistent,
    # so n**2 iterations is a safe upper bound before declaring a cycle
    max_iterations = len(child_cases) ** 2
    iterations = 0
    while child_cases:
        if iterations > max_iterations:
            raise Exception('cases had inconsistent references to each other')
        iterations += 1
        # take the first case
        case = child_cases.pop(0)
        can_process = True
        parent_ids = {}
        for index in case.indices:
            ref_id = index.referenced_id
            # if the parent hasn't been processed yet,
            # append it to the back and break out
            if ref_id not in old_to_new:
                child_cases.append(case)
                can_process = False
                break
            # record the new ids of this parent for the copies below
            parent_ids[ref_id] = old_to_new[ref_id]
        # keep processing
        if not can_process:
            continue
        old_to_new[case._id] = []
        for i in range(factor - 1):
            # grab the parents for this round of exploding
            parents = {k: v[i] for k, v in parent_ids.items()}
            new_case_id = uuid.uuid4().hex
            old_to_new[case._id].append(new_case_id)
            submit_case(case, new_case_id, domain, parents)
            count += 1
            if task:
                DownloadBase.set_progress(explode_case_task, count, 0)

    messages.append("All of %s's cases were exploded by a factor of %d"
                    % (user.raw_username, factor))
    return {'messages': messages}
def get_case_ids_in_domain_by_owners(domain, owner_ids, closed=None):
    """Delegate to the singular helper with an owner-id membership filter."""
    return get_case_ids_in_domain_by_owner(domain,
                                           owner_id__in=owner_ids,
                                           closed=closed)
def explode_cases(user_id, domain, factor, task=None):
    """Duplicate every open case owned by the user ``factor`` times in total.

    Parent cases (no indices) are copied first; child cases are deferred and
    re-queued until every parent they reference has been copied, so each new
    child copy can be linked to the matching new parent copy.

    :param user_id: id of the CommCareUser whose cases are exploded
    :param domain: domain name the cases live in
    :param factor: desired total count per case — ``factor - 1`` copies are
        submitted for each original
    :param task: optional task handle used for progress reporting
    :returns: dict with a ``'messages'`` list describing the result
    :raises Exception: if child cases reference parents that never appear
        (inconsistent references)
    """
    user = CommCareUser.get_by_user_id(user_id, domain)
    messages = []
    if task:
        DownloadBase.set_progress(explode_case_task, 0, 0)
    count = 0

    old_to_new = {}  # original case id -> list of new copy ids
    child_cases = []
    case_ids = get_case_ids_in_domain_by_owner(
        domain, owner_id__in=user.get_owner_ids(), closed=False)
    cases = (CommCareCase.wrap(doc)
             for doc in iter_docs(CommCareCase.get_db(), case_ids))

    # copy parents
    for case in cases:
        # save children for later
        if case.indices:
            child_cases.append(case)
            continue
        old_to_new[case._id] = []
        for i in range(factor - 1):
            new_case_id = uuid.uuid4().hex
            # add new parent ids to the old to new id mapping
            old_to_new[case._id].append(new_case_id)
            submit_case(case, new_case_id, domain)
            count += 1
            if task:
                DownloadBase.set_progress(explode_case_task, count, 0)

    # each pass resolves at least one child when references are consistent,
    # so n**2 iterations is a safe upper bound before declaring a cycle
    max_iterations = len(child_cases) ** 2
    iterations = 0
    while child_cases:
        if iterations > max_iterations:
            raise Exception('cases had inconsistent references to each other')
        iterations += 1
        # take the first case
        case = child_cases.pop(0)
        can_process = True
        parent_ids = {}
        for index in case.indices:
            ref_id = index.referenced_id
            # if the parent hasn't been processed yet,
            # append it to the back and break out
            if ref_id not in old_to_new:
                child_cases.append(case)
                can_process = False
                break
            # record the new ids of this parent for the copies below
            parent_ids[ref_id] = old_to_new[ref_id]
        # keep processing
        if not can_process:
            continue
        old_to_new[case._id] = []
        for i in range(factor - 1):
            # grab the parents for this round of exploding
            parents = {k: v[i] for k, v in parent_ids.items()}
            new_case_id = uuid.uuid4().hex
            old_to_new[case._id].append(new_case_id)
            submit_case(case, new_case_id, domain, parents)
            count += 1
            if task:
                DownloadBase.set_progress(explode_case_task, count, 0)

    messages.append("All of %s's cases were exploded by a factor of %d"
                    % (user.raw_username, factor))
    return {'messages': messages}
def delete_all(self):
    """Bulk-delete every case in the domain owned by self.user."""
    owned_case_ids = get_case_ids_in_domain_by_owner(
        self.domain, self.user.user_id)
    iter_bulk_delete(self.db, owned_case_ids)
def delete_all(self):
    """Remove all cases owned by self.user from self.domain in bulk."""
    ids = get_case_ids_in_domain_by_owner(self.domain, self.user.user_id)
    iter_bulk_delete(self.db, ids)
def get_case_ids_in_domain_by_owners(domain, owner_ids):
    """Plural convenience wrapper around get_case_ids_in_domain_by_owner."""
    return get_case_ids_in_domain_by_owner(
        domain, owner_id__in=owner_ids)
def handle(self, *args, **options):
    # Management-command entry point: copy a group plus its domain, cases,
    # forms, users, roles and (optionally) sync logs from a source couch db
    # into the local one.
    # Deliberately disabled: everything below the raise is unreachable until
    # the command is repaired.
    raise CommandError(
        'copy_group_data is currently broken. '
        'Ask Danny or Ethan to fix it along the lines of '
        'https://github.com/dimagi/commcare-hq/pull/9180/files#diff-9d976dc051a36a028c6604581dfbce5dR95'
    )
    if len(args) != 2:
        raise CommandError('Usage is copy_group_data %s' % self.args)
    sourcedb = Database(args[0])
    group_id = args[1]
    exclude_user_owned = options["exclude_user_owned"]
    print 'getting group'
    group = Group.wrap(sourcedb.get(group_id))
    group.save(force_update=True)
    print 'getting domain'
    # fetch the domain doc matching the group's domain from the source db
    domain = Domain.wrap(
        sourcedb.view('domain/domains', key=group.domain, include_docs=True,
                      reduce=False, limit=1).one()['doc']
    )
    dt = DocumentTransform(domain._obj, sourcedb)
    save(dt, Domain.get_db())
    # cases owned by the group itself, plus by its users unless excluded
    owners = [group_id]
    if not exclude_user_owned:
        owners.extend(group.users)
    print 'getting case ids'
    with OverrideDB(CommCareCase, sourcedb):
        case_ids = get_case_ids_in_domain_by_owner(
            domain.name, owner_id__in=owners)
    xform_ids = set()
    print 'copying %s cases' % len(case_ids)
    # copy cases in CHUNK_SIZE batches, collecting their form ids as we go
    for i, subset in enumerate(chunked(case_ids, CHUNK_SIZE)):
        print i * CHUNK_SIZE
        cases = [CommCareCase.wrap(case['doc'])
                 for case in sourcedb.all_docs(
                     keys=list(subset),
                     include_docs=True,
                 )]
        for case in cases:
            xform_ids.update(case.xform_ids)
        self.lenient_bulk_save(CommCareCase, cases)
    if not exclude_user_owned:
        # also grab submissions that may not have included any case data
        for user_id in group.users:
            xform_ids.update(res['id'] for res in sourcedb.view(
                'all_forms/view',
                startkey=['submission user', domain.name, user_id],
                endkey=['submission user', domain.name, user_id, {}],
                reduce=False
            ))
    print 'copying %s xforms' % len(xform_ids)
    user_ids = set(group.users)

    def form_wrapper(row):
        # strip attachments/blobs so the copy does not drag binary payloads
        doc = row['doc']
        doc.pop('_attachments', None)
        doc.pop('external_blobs', None)
        return XFormInstance.wrap(doc)

    # copy forms in batches, accumulating the set of submitting users
    for i, subset in enumerate(chunked(xform_ids, CHUNK_SIZE)):
        print i * CHUNK_SIZE
        xforms = sourcedb.all_docs(
            keys=list(subset),
            include_docs=True,
            wrapper=form_wrapper,
        ).all()
        self.lenient_bulk_save(XFormInstance, xforms)
        for xform in xforms:
            user_id = xform.metadata.userID
            user_ids.add(user_id)
    print 'copying %s users' % len(user_ids)

    def wrap_user(row):
        # best-effort wrapping: log and skip rows that cannot be wrapped
        try:
            doc = row['doc']
        except KeyError:
            logging.exception('trouble with user result %r' % row)
            return None
        try:
            return CouchUser.wrap_correctly(doc)
        except Exception:
            logging.exception('trouble with user %s' % doc['_id'])
            return None

    users = sourcedb.all_docs(
        keys=list(user_ids),
        include_docs=True,
        wrapper=wrap_user,
    ).all()
    role_ids = set([])
    for user in filter(lambda u: u is not None, users):
        # if we use bulk save, django user doesn't get sync'd
        domain_membership = user.get_domain_membership(domain.name)
        if domain_membership and domain_membership.role_id:
            # NOTE(review): checks the local `domain_membership` but adds
            # `user.domain_membership.role_id` — confirm these are the same
            # membership, otherwise this records the wrong role id
            role_ids.add(user.domain_membership.role_id)
        user.save(force_update=True)
    print 'copying %s roles' % len(role_ids)
    for i, subset in enumerate(chunked(role_ids, CHUNK_SIZE)):
        roles = [UserRole.wrap(role['doc'])
                 for role in sourcedb.all_docs(
                     keys=list(subset),
                     include_docs=True,
                 )]
        self.lenient_bulk_save(UserRole, roles)
    if options['include_sync_logs']:
        print 'copying sync logs'
        for user_id in user_ids:
            # newest-first list of sync log ids for this user
            log_ids = [res['id'] for res in sourcedb.view(
                "phone/sync_logs_by_user",
                startkey=[user_id, {}],
                endkey=[user_id],
                descending=True,
                reduce=False,
                include_docs=True
            )]
            print 'user: %s, logs: %s' % (user_id, len(log_ids))
            for i, subset in enumerate(chunked(log_ids, CHUNK_SIZE)):
                print i * CHUNK_SIZE
                logs = [SyncLog.wrap(log['doc'])
                        for log in sourcedb.all_docs(
                            keys=list(subset),
                            include_docs=True,
                        )]
                self.lenient_bulk_save(SyncLog, logs)