def handle(self, child_file, **options):
     relevant_districts = SQLLocation.objects.filter(domain='icds-cas',
                                                     location_id__in=['d982a6fb4cca0824fbde59db18d2d422',
                                                                      '0ffe4a1f110ffc17bb9b749abdfd697c'])
     owners = SQLLocation.objects.get_queryset_descendants(relevant_districts, include_self=True)
     owner_name_mapping = {loc.location_id: loc.name for loc in owners}
     hh_cases = self._get_closed_hh_cases(list(owner_name_mapping))
     with open(child_file, 'w', encoding='utf-8') as child_csv:
         child_writer = csv.writer(child_csv)
         child_writer.writerow(CSV_HEADERS)
         for cases in chunked(with_progress_bar(hh_cases, hh_cases.count), 500):
             household_ids = []
             hh_map = {}
             for hh in cases:
                 # the CSV is opened in text mode, so store str values directly
                 hh_map[hh['case_id']] = (hh['name'], hh.get('closed_on', ''))
                 household_ids.append(hh['case_id'])
             child_cases = self._get_child_cases(household_ids)
             ids = set(household_ids)
             for child in child_cases.hits:
                  # filter() returns a lazy iterator on Python 3; take the
                  # first matching parent index, or None if there is none
                  parent_index = next(
                      (index for index in child['indices']
                       if index['referenced_id'] in ids and index['identifier'] == 'parent'),
                      None
                  )
                 if parent_index:
                     hh_id = parent_index['referenced_id']
                      row = [child.get(prop, '') for prop in CHILD_PROPERTIES]
                      row.append(owner_name_mapping.get(child.get('owner_id', ''), ''))
                     hh_info = (hh_id, hh_map[hh_id][0], hh_map[hh_id][1])
                     row.extend(hh_info)
                     child_writer.writerow(row)
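Every example on this page funnels an iterable through `with_progress_bar`. The corehq helper itself isn't shown here, but its call signature can be inferred from the call sites. A minimal, hypothetical shim (a sketch, not the real implementation) that satisfies the same calls:

import sys

def with_progress_bar(iterable, length=None, prefix="Processing",
                      oneline=True, stream=None, offset=0):
    # Hypothetical shim matching the call sites on this page: yields items
    # unchanged while writing a one-line progress counter to `stream`.
    # `oneline` is accepted only for signature compatibility in this sketch.
    stream = stream or sys.stdout
    if length is None:
        length = len(iterable)  # assumes a sized iterable when no length is given
    for position, item in enumerate(iterable, start=offset + 1):
        if length:
            stream.write("\r%s %d/%d" % (prefix, position, length))
            stream.flush()
        yield item
    stream.write("\n")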
Example #2
 def iter_attachments(self):
     docs = get_all_docs_with_doc_types(Application.get_db(),
                                        apps_migration.doc_types)
     for doc in with_progress_bar(docs, length=self.docs_count):
         if '_attachments' in doc:
             for filename, info in doc['_attachments'].items():
                 yield doc, filename, info
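A consumer of `iter_attachments` might fetch each attachment body as it iterates. A sketch assuming couchdbkit's standard `fetch_attachment` accessor; `migrate_attachment` is an illustrative placeholder:

 def migrate_all_attachments(self):
     for doc, filename, info in self.iter_attachments():
         # fetch_attachment(doc_or_id, name) is the standard couchdbkit accessor
         content = Application.get_db().fetch_attachment(doc['_id'], filename)
         migrate_attachment(doc['_id'], filename, content, info)  # hypothetical destination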
    def handle(self, domain, data_source_id, *args, **kwargs):
        config, _ = get_datasource_config(data_source_id, domain)
        adapter = get_indicator_adapter(config)
        q = adapter.get_query_object()
        document_store = get_document_store_for_doc_type(domain, config.referenced_doc_type)
        bad_rows = []
        for row in with_progress_bar(q, length=q.count()):
            doc_id = row.doc_id
            doc = document_store.get_document(doc_id)

            current_rows = config.get_all_values(doc)
            if len(current_rows) > 1:
                raise ValueError("this command doesn't work for datasources returning multiple rows per doc")

            try:
                current_row = current_rows[0]
            except IndexError:  # config produced no rows for this doc
                continue

            # don't compare the 'inserted_at' columns
            current_row = [val for val in current_row if val.column.database_column_name != 'inserted_at']

            for val in current_row:
                try:
                    inserted_value = getattr(row, val.column.database_column_name)
                    if (inserted_value != val.value
                       or row.inserted_at.replace(tzinfo=pytz.utc) < parse_datetime(doc['server_modified_on'])):
                        bad_rows.append({
                            'doc_id': row.doc_id,
                            'column_name': val.column.database_column_name,
                            'inserted_at': row.inserted_at.isoformat(),
                            'server_modified_on': doc['server_modified_on'],
                            'stored_value': inserted_value,
                            'desired_value': val.value,
                            'message': ('column mismatch'
                                        if inserted_value != val.value else "modified date early"),
                        })
                except AttributeError:
                    bad_rows.append({
                        'doc_id': row.doc_id,
                        'column_name': val.column.database_column_name,
                        'inserted_at': 'missing',
                        'server_modified_on': doc['server_modified_on'],
                        'stored_value': 'missing',
                        'desired_value': val.value,
                        'message': 'doc missing',
                    })

        filename = 'datasource_mismatches_{}_{}.csv'.format(
            data_source_id[-8:],
            datetime.utcnow().strftime("%Y-%m-%d-%H-%M-%S")
        )
        with open(filename, 'w', encoding='utf-8') as f:
            headers = ['doc_id', 'column_name', 'inserted_at', 'server_modified_on',
                       'stored_value', 'desired_value', 'message']
            writer = csv.DictWriter(f, headers)
            writer.writeheader()
            writer.writerows(bad_rows)

        print("Found {} mismatches. Check {} for more details".format(len(bad_rows), filename))
    def handle(self, path, **options):
        with_traceback = options['traceback']
        self.strict = options['strict']

        if not os.path.isfile(path):
            raise CommandError("Couldn't locate domain list: {}".format(path))

        self.stdout.ending = "\n"
        self.stderr.ending = "\n"
        with open(path, 'r', encoding='utf-8') as f:
            domains = [name.strip() for name in f.readlines() if name.strip()]

        failed = []
        self.stdout.write("Processing {} domains".format(len(domains)))
        for domain in with_progress_bar(domains, oneline=False):
            try:
                success, reason = self.migrate_domain(domain)
                if not success:
                    failed.append((domain, reason))
            except Exception as e:
                if with_traceback:
                    traceback.print_exc()
                self.stderr.write("Error migrating domain {}: {}".format(domain, e))
                self.abort(domain)
                failed.append((domain, e))

        if failed:
            self.stderr.write("Errors:")
            self.stderr.write(
                "\n".join(
                    ["{}: {}".format(domain, exc) for domain, exc in failed]))
        else:
            self.stdout.write("All migrations successful!")
    def handle(self, *args, **options):
        domain = options.get('domain')
        repeater_id = options.get('repeater_id')
        state = options.get('state')
        records_file_path = options.get('records_file_path')

        if records_file_path:
            self._load_record_ids_from_file(records_file_path)
            records = self.record_ids
            record_count = len(records)
        elif domain and repeater_id:
            records = iter_repeat_records_by_domain(domain, repeater_id=repeater_id, state=state)
            record_count = get_repeat_record_count(domain, repeater_id=repeater_id, state=state)
        else:
            raise CommandError("Insufficient Arguments")

        for record in with_progress_bar(records, length=record_count):
            if isinstance(record, str):
                record_id = record
                try:
                    record = RepeatRecord.get(record_id)
                except ResourceNotFound:
                    self.ws.append([record_id, '', 'Not Found'])
                    continue
            self._add_row(record)

        file_name = self._save_file(repeater_id, state)
        print("Report saved in file:{filename}".format(filename=file_name))
Example #6
    def handle(self, log_file, **options):
        self.domain = 'hki-nepal-suaahara-2'
        loc_mapping = {}
        locs = SQLLocation.objects.filter(domain=self.domain, level=4)
        for loc in locs:
            loc_mapping[loc.site_code] = loc.location_id

        failed_updates = []
        household_cases = CaseES().domain(self.domain).case_type('household').count()
        member_cases = CaseES().domain(self.domain).case_type('household_member').count()
        total_cases = household_cases + member_cases
        with open(log_file, "w", encoding='utf-8') as fh:
            fh.write('--------Successful Form Ids----------\n')
            for cases in chunked(with_progress_bar(self._get_cases_to_process(), total_cases), 100):
                cases_to_update = self._process_cases(cases, failed_updates, loc_mapping)
                try:
                    xform, cases = bulk_update_cases(
                        self.domain, cases_to_update, self.__module__)
                    fh.write(xform.form_id + '\n')
                except LocalSubmissionError as e:
                    print(six.text_type(e))
                    failed_updates.extend(case[0] for case in cases_to_update)
            fh.write('--------Failed Cases--------------\n')
            for case_id in failed_updates:
                fh.write(case_id + '\n')
 def handle(self, domain, log_file, **options):
     total_cases = CaseES().domain(domain).case_type('household').is_closed().count()
     self.case_accessor = CaseAccessors(domain)
     failed_updates = []
     with open(log_file, "w", encoding='utf-8') as fh:
         fh.write('--------Successful Form Ids----------\n')
         for cases in chunked(with_progress_bar(self._get_cases_to_process(domain), total_cases), 100):
             related_cases = self._get_related_cases(cases)
             case_tuples = [(case_id, {}, True) for case_id in related_cases]
             try:
                 xform, cases = bulk_update_cases(
                     domain, case_tuples, self.__module__)
                 fh.write(xform.form_id + '\n')
             except LocalSubmissionError as e:
                 print('submission error')
                 print(six.text_type(e))
                 failed_updates.extend(related_cases)
             except Exception as e:
                 print('unexpected error')
                 print(six.text_type(e))
                 failed_updates.extend(related_cases)
         fh.write('--------Failed Cases--------------\n')
         for case_id in failed_updates:
              fh.write(case_id + '\n')
         print('-------------COMPLETE--------------')
    def handle(self, domain, **options):
        verbose = options["verbose"] or options["dryrun"]

        succeeded = []
        failed = []
        error_messages = defaultdict(lambda: 0)
        problem_ids = self._get_form_ids(domain)
        prefix = "Processing: "
        form_iterator = FormAccessors(domain).iter_forms(problem_ids)
        if not verbose:
            form_iterator = with_progress_bar(form_iterator, len(problem_ids), prefix=prefix, oneline=False)
        for form in form_iterator:
            if verbose:
                print("%s\t%s\t%s\t%s" % (form.form_id, form.received_on, form.xmlns, form.problem.strip()))

            if not options["dryrun"]:
                try:
                    reprocess_xform_error(form)
                except Exception as e:
                    failed.append(form.form_id)
                    error_messages[str(e)] += 1
                else:
                    succeeded.append(form.form_id)

        if not options["dryrun"]:
            print("%s / %s forms successfully processed, %s failures" %
                  (len(succeeded), len(succeeded) + len(failed), len(failed)))
            if error_messages:
                print("The following errors were seen: \n%s" %
                      ("\n".join("%s: %s" % (v, k) for k, v in error_messages.items())))
Example #9
def _delete_all_cases(domain_name):
    logger.info('Deleting cases...')
    case_accessor = CaseAccessors(domain_name)
    case_ids = case_accessor.get_case_ids_in_domain()
    for case_id_chunk in chunked(with_progress_bar(case_ids, stream=silence_during_tests()), 500):
        case_accessor.soft_delete_cases(list(case_id_chunk))
    logger.info('Deleting cases complete.')
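`silence_during_tests` keeps the progress bar from polluting test output by swapping the stream. One plausible shape for such a helper (hypothetical; the environment flag is illustrative):

import os
import sys
from io import StringIO

def silence_during_tests():
    # Hypothetical: write progress output to a throwaway buffer when
    # running under tests, and to stderr otherwise.
    if os.environ.get('RUNNING_TESTS'):
        return StringIO()
    return sys.stderr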
    def handle(self, ids_file, **options):
        with open(ids_file, encoding='utf-8') as f:
            doc_ids = [line.strip() for line in f]
        total_doc_ids = len(doc_ids)
        doc_ids = set(doc_ids)
        print("{} total doc ids, {} unique".format(total_doc_ids, len(doc_ids)))

        db = XFormInstance.get_db()  # Both forms and cases are in here
        with IterDB(db) as iter_db:
            for doc in iter_docs(db, with_progress_bar(doc_ids)):
                iter_db.save(doc)

        print("{} docs saved".format(len(iter_db.saved_ids)))
        print("{} docs errored".format(len(iter_db.error_ids)))
        not_found = len(doc_ids) - len(iter_db.saved_ids) - len(iter_db.error_ids)
        print("{} docs not found".format(not_found))

        filename = '{}_{}.csv'.format(ids_file.split('/')[-1],
                                      datetime.datetime.now().isoformat())
        with open(filename, 'w', encoding='utf-8') as f:
            writer = csv.writer(f)
            writer.writerow(['doc_id', 'status'])
            for doc_id in doc_ids:
                if doc_id in iter_db.saved_ids:
                    status = "saved"
                elif doc_id in iter_db.error_ids:
                    status = "errored"
                else:
                    status = "not_found"
                writer.writerow([doc_id, status])

        print("Saved results to {}".format(filename))
Example #11
    def handle(self, domain, case_type, start_from_db=None, **options):
        print("Resyncing messaging models for %s/%s ..." % (domain, case_type))

        db_aliases = get_db_aliases_for_partitioned_query()
        db_aliases.sort()
        if start_from_db:
            if start_from_db not in db_aliases:
                raise CommandError("DB alias not recognized: %s" % start_from_db)

            index = db_aliases.index(start_from_db)
            db_aliases = db_aliases[index:]

        print("Iterating over databases: %s" % db_aliases)

        for db_alias in db_aliases:
            print("")
            print("Creating tasks for cases in %s ..." % db_alias)
            case_ids = list(
                CommCareCaseSQL
                .objects
                .using(db_alias)
                .filter(domain=domain, type=case_type, deleted=False)
                .values_list('case_id', flat=True)
            )
            for case_id in with_progress_bar(case_ids):
                sync_case_for_messaging.delay(domain, case_id)
Example #12
 def get_pending_cases(self):
     count = self.statedb.count_undiffed_cases()
     if not count:
         return []
     pending = self.statedb.iter_undiffed_case_ids()
     return with_progress_bar(pending,
                              count,
                              prefix="Pending case diffs",
                              oneline=False)
Example #13
 def handle(self, **options):
     roles = UserRole.view('users/roles_by_domain',
                           include_docs=False,
                           reduce=False).all()
     role_ids = [role['id'] for role in roles]
     iter_update(UserRole.get_db(),
                 _copy_permissions,
                 with_progress_bar(role_ids),
                 chunksize=1)
Example #14
def flag_legacy_child_module_domains():
    """Enable the LEGACY_CHILD_MODULES flag for domains that need it"""
    domains = set(toggles.BASIC_CHILD_MODULE.get_enabled_domains() +
                  toggles.APP_BUILDER_ADVANCED.get_enabled_domains())
    for domain in with_progress_bar(domains):
        for app in get_apps(domain):
            if has_misordered_modules(app):
                if needs_legacy_flag(app):
                    toggles.LEGACY_CHILD_MODULES.set(domain, True, toggles.NAMESPACE_DOMAIN)
Example #15
def _dump_docs(query, type_):
    print("Dumping {}".format(type_))

    total_docs = query.count()
    path, file = _get_file(type_)
    with file:
        for doc in with_progress_bar(query.size(500).scroll(), length=total_docs):
            file.write('{}\n'.format(json.dumps(doc)))
    return path
    def handle(self, username, **options):
        def update_username(event_dict):
            event_dict['user'] = new_username
            return DocUpdate(doc=event_dict)

        new_username = "******"
        event_ids = navigation_event_ids_by_user(username)
        iter_update(NavigationEventAudit.get_db(), update_username,
                    with_progress_bar(event_ids, len(event_ids)))
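`update_username` above shows the callback contract `iter_update` relies on: the function receives each document as a plain dict and returns a `DocUpdate` wrapping the modified dict; returning None appears to leave the document untouched. Another callback written to the same contract (the `is_active` field is illustrative):

def deactivate_user(user_doc):
    if not user_doc.get('is_active', True):
        return None  # already in the desired state; skip the write
    user_doc['is_active'] = False
    return DocUpdate(doc=user_doc)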
 def update_vouchers(self, voucher_updates):
     print("updating voucher cases")
     for chunk in chunked(with_progress_bar(voucher_updates), 100):
         updates = [
             (update.case_id, update.properties, False)
             for update in chunk
         ]
         if self.commit:
             bulk_update_cases(self.domain, updates, self.__module__)
 def with_progress(self, doc_type, iterable, count_key):
     couchdb = XFormInstance.get_db()
     return with_progress_bar(
         iterable,
         get_doc_count_in_domain_by_type(self.domain, doc_type, couchdb),
         prefix=f"Scanning {doc_type}",
         offset=self.counter.get(count_key),
         oneline="concise",
     )
Example #20
    def handle(self, **options):
        records = UserHistory.objects.filter(
            Q(user_repr__isnull=True) | Q(changed_by_repr__isnull=True))

        for user_history in with_progress_bar(
                queryset_to_iterator(records, UserHistory), records.count()):
            try:
                migrate(user_history)
            except Exception as e:
                logging.error(f"{user_history.pk}: {e}")
Example #21
def _delete_all_forms(domain_name):
    logger.info('Deleting forms...')
    form_accessor = FormAccessors(domain_name)
    form_ids = list(itertools.chain(*[
        form_accessor.get_all_form_ids_in_domain(doc_type=doc_type)
        for doc_type in doc_type_to_state
    ]))
    for form_id_chunk in chunked(with_progress_bar(form_ids, stream=silence_during_tests()), 500):
        form_accessor.soft_delete_forms(list(form_id_chunk))
    logger.info('Deleting forms complete.')
Example #22
    def handle(self, domain, **options):
        # For all successful registration records
        # If any have an attempt that says "A patient with this beneficiary_id already exists"
        # Check the episode case. If this doesn't have "dots_99_registered" then set this property to "true"
        self.commit = options['commit']
        repeater_id = 'dc73c3da43d42acd964d80b287926833'  # 99dots register
        accessor = CaseAccessors(domain)
        existing_message = "A patient with this beneficiary_id already exists"
        count = get_repeat_record_count(domain, repeater_id, state="SUCCESS")
        records = iter_repeat_records_by_domain(domain, repeater_id, state="SUCCESS")

        cases_to_update = set()
        print("Filtering successful cases")
        for repeat_record in with_progress_bar(records, length=count):
            if any(existing_message in (attempt.message or '')
                   for attempt in repeat_record.attempts):
                try:
                    episode = accessor.get_case(repeat_record.payload_id)
                except CaseNotFound:
                    continue
                if episode.get_case_property('dots_99_registered') != 'true':
                    cases_to_update.add(episode)

        timestamp = datetime.utcnow().strftime("%Y-%m-%d-%H-%M-%S")
        with open('{}_set_99dots_to_registered.csv'.format(timestamp), 'w') as f:
            writer = csv.writer(f)
            writer.writerow([
                'beneficiary_id',
                'episode_id',
                'UpdatePatient Status',
                'Adherence Status',
                'TreatmentOutcome Status'
            ])
            print("Updating {} successful cases in 99DOTS".format(len(cases_to_update)))
            for case in with_progress_bar(cases_to_update):
                writer.writerow([
                    get_person_case_from_episode(domain, case.case_id).case_id,
                    case.case_id,
                    self.update_registered_status(domain, case),
                    self.update_patients(domain, case),
                    self.send_adherence(domain, case),
                    self.send_treatment_outcome(domain, case),
                ])
Example #23
 def _with_progress(self, doc_types, iterable, progress_name='Migrating'):
     if self.with_progress:
         doc_count = sum([
             get_doc_count_in_domain_by_type(self.domain, doc_type, XFormInstance.get_db())
             for doc_type in doc_types
         ])
         prefix = "{} ({})".format(progress_name, ', '.join(doc_types))
         return with_progress_bar(iterable, doc_count, prefix=prefix, oneline=False)
     else:
         return iterable
def print_counts_by_default_backend(query):
    counts = defaultdict(int)
    for phone in with_progress_bar(query, len(query), oneline=True):
        default_backend = SQLMobileBackend.load_default_by_phone_and_domain(
            SQLMobileBackend.SMS,
            clean_phone_number(phone.phone_number),
            domain=phone.domain)
        counts[default_backend.name] += 1
    print("Counts by default backend")
    for default, count in sorted(counts.items()):
        print("{:<25}{:>4}".format(default, count))
Example #27
    def reconcile_repeat_records(self, voucher_updates):
        """
        Mark updated records as "succeeded", all others as "cancelled"
        Delete duplicate records if any exist
        """
        print "Reconciling repeat records"
        chemist_voucher_repeater_id = 'be435d3f407bfb1016cc89ebbf8146b1'
        lab_voucher_repeater_id = 'be435d3f407bfb1016cc89ebbfc42a47'

        already_seen = set()
        updates_by_voucher_id = {update.id: update for update in voucher_updates}

        headers = ['record_id', 'voucher_id', 'status']
        rows = []

        get_db = (lambda: IterDB(RepeatRecord.get_db())) if self.commit else MagicMock
        with get_db() as iter_db:
            for repeater_id in [chemist_voucher_repeater_id, lab_voucher_repeater_id]:
                print "repeater {}".format(repeater_id)
                records = iter_repeat_records_by_domain(self.domain, repeater_id=repeater_id)
                record_count = get_repeat_record_count(self.domain, repeater_id=repeater_id)
                for record in with_progress_bar(records, record_count):
                    if record.payload_id in already_seen:
                        status = "deleted"
                        iter_db.delete(record)
                    elif record.payload_id in updates_by_voucher_id:
                        # add successful attempt
                        status = "succeeded"
                        attempt = RepeatRecordAttempt(
                            cancelled=False,
                            datetime=datetime.datetime.utcnow(),
                            failure_reason=None,
                            success_response="Paid offline via import_voucher_confirmations",
                            next_check=None,
                            succeeded=True,
                        )
                        record.add_attempt(attempt)
                        iter_db.save(record)
                    else:
                        # mark record as canceled
                        record.add_attempt(RepeatRecordAttempt(
                            cancelled=True,
                            datetime=datetime.datetime.utcnow(),
                            failure_reason="Cancelled during import_voucher_confirmations",
                            success_response=None,
                            next_check=None,
                            succeeded=False,
                        ))
                        iter_db.save(record)

                    already_seen.add(record.payload_id)
                    rows.append([record._id, record.payload_id, status])

        self.write_csv('repeat_records', headers, rows)
 def _rebuild_cases(self):
     user = CouchUser.get_by_user_id(self.user_id)
     reason = "User %s forms archived for domain %s by system" % (user.raw_username, self.domain)
     form_processor_interface = FormProcessorInterface(self.domain)
     with open("cases_rebuilt.txt", "w+b") as case_log:
         for case_id in with_progress_bar(self.case_ids_to_rebuild):
             case_log.write("%s\n" % case_id)
             rebuild_case_from_forms(self.domain, case_id, RebuildWithReason(reason=reason))
             ledgers = form_processor_interface.ledger_db.get_ledgers_for_case(case_id)
             for ledger in ledgers:
                 form_processor_interface.ledger_processor.rebuild_ledger_state(
                     case_id, ledger.section_id, ledger.entry_id)
 def handle(self, **options):
     failed_domains = []
     domains = options['domains'] or [d['key'] for d in Domain.get_all(include_docs=False)]
     print('Generating data dictionary for {} domains'.format(len(domains)))
     for domain in with_progress_bar(domains):
         try:
             generate_data_dictionary(domain)
         except OldExportsEnabledException:
             failed_domains.append(domain)
     print('--- Failed Domains ---')
     for domain in failed_domains:
         print(domain)
Example #30
def _delete_all_forms(domain_name):
    logger.info('Deleting forms...')
    form_ids = list(
        itertools.chain(*[
            XFormInstance.objects.get_form_ids_in_domain(
                domain_name, doc_type) for doc_type in doc_type_to_state
        ]))
    for form_id_chunk in chunked(
            with_progress_bar(form_ids, stream=silence_during_tests()), 500):
        XFormInstance.objects.soft_delete_forms(domain_name,
                                                list(form_id_chunk))
    logger.info('Deleting forms complete.')
Example #31
    def handle(self, domain, case_type, log_file_name, case_ids, **options):
        commit = options['commit']
        deletion_id = options['deletion_id']

        if not case_ids:
            case_ids = self.get_case_ids(domain, case_type)

        with open(log_file_name, 'w') as log_file:
            logger = self.get_logger(log_file)
            for case_id in with_progress_bar(case_ids):
                if self.should_delete(domain, case_id):
                    self.delete_case(case_id, commit, deletion_id, domain, logger, case_type)
 def delete_from_file(self):
     with open(self.filename) as f:
         doc_count = sum(1 for line in f)
     with open(self.filename) as f:
         with IterDB(XFormInstance.get_db(), throttle_secs=2, chunksize=100) as iter_db:
             for line in with_progress_bar(f, length=doc_count):
                 doc = json.loads(line)
                 assert doc['xmlns'] == DEVICE_LOG_XMLNS
                 assert doc['doc_type'] == 'XFormInstance'
                 iter_db.delete(doc)
     if iter_db.errors_by_type:
         print('There were some errors', iter_db.errors_by_type)
Example #33
    def handle(self, **options):
        rule = self.get_rule(options['domain'], options['rule_id'])

        print("Fetching case ids...")
        case_ids = CaseAccessors(rule.domain).get_case_ids_in_domain(rule.case_type)
        case_id_chunks = list(chunked(case_ids, 10))

        for case_id_chunk in with_progress_bar(case_id_chunks):
            case_id_chunk = list(case_id_chunk)
            with CriticalSection([get_sync_key(case_id) for case_id in case_id_chunk], timeout=5 * 60):
                for case in CaseAccessors(rule.domain).get_cases(case_id_chunk):
                    rule.run_rule(case, utcnow())
Example #34
def get_current_apps(reset=False, batchsize=10):
    '''
    Only examine the most recent version of any application. This does look at linked and remote apps.
    Note that this doesn't support resumable execution, as the effort was out of scope,
    and therefore reset is unused.
    '''
    query = GlobalAppConfig.objects.values_list('app_id',
                                                flat=True).order_by('id')
    count = query.count()
    apps_iter = get_current_apps_iter(query, batchsize)

    return with_progress_bar(apps_iter, count)
    def handle(self, **options):
        log_path = options.get("log_path")
        verify_only = options.get("verify_only", False)
        skip_verify = options.get("skip_verify", False)

        if not log_path:
            date = datetime.datetime.utcnow().strftime('%Y-%m-%dT%H-%M-%S.%f')
            command_name = self.__class__.__module__.split('.')[-1]
            log_path = f"{command_name}_{date}.log"

        if log_path != "-" and os.path.exists(log_path):
            raise CommandError(f"Log file already exists: {log_path}")

        if verify_only and skip_verify:
            raise CommandError(
                "verify_only and skip_verify are mutually exclusive")

        self.diff_count = 0
        doc_index = 0

        domains = options["domains"]
        if domains:
            doc_count = self._get_couch_doc_count_for_domains(domains)
            sql_doc_count = self._get_sql_doc_count_for_domains(domains)
            docs = self._iter_couch_docs_for_domains(domains)
        else:
            doc_count = self._get_couch_doc_count_for_type()
            sql_doc_count = self.sql_class().objects.count()
            docs = self._get_all_couch_docs_for_model()

        print(f"\n\nDetailed log output file: {log_path}")
        print("Found {} {} docs and {} {} models".format(
            doc_count,
            self.couch_doc_type(),
            sql_doc_count,
            self.sql_class().__name__,
        ))

        with self._open_log(log_path) as logfile:
            for doc in with_progress_bar(docs, length=doc_count,
                                         oneline=False):
                doc_index += 1
                if not verify_only:
                    self._migrate_doc(doc, logfile)
                if not skip_verify:
                    self._verify_doc(doc, logfile, exit=not verify_only)
                if doc_index % 1000 == 0:
                    print(f"Diff count: {self.diff_count}")

        print(f"Processed {doc_index} documents")
        if not skip_verify:
            print(f"Found {self.diff_count} differences")
    def create_repeat_records_on_dest_repeater(self, source_repeater_id,
                                               dest_repeater_id, state):
        dest_repeater = Repeater.get(dest_repeater_id)
        retriggered = set()

        records = iter_repeat_records_by_domain(domain,
                                                repeater_id=source_repeater_id,
                                                state=state)
        record_count = get_repeat_record_count(domain,
                                               repeater_id=source_repeater_id,
                                               state=state)
        accessor = CaseAccessors(domain)
        print("Iterating over records and adding new record for them")
        for record in with_progress_bar(records, length=record_count):
            if record.payload_id in retriggered:
                self.record_failure(record.get_id,
                                    record.payload_id,
                                    error_message="Already triggered")
                continue
            try:
                episode = accessor.get_case(record.payload_id)
                episode_case_properties = episode.dynamic_case_properties()
                if (episode_case_properties.get('nikshay_registered', 'false')
                        == 'false' and episode_case_properties.get(
                            'private_nikshay_registered', 'false') == 'false'
                        and not episode_case_properties.get('nikshay_id')
                        and episode_case_properties.get('episode_type')
                        == 'confirmed_tb'
                        and is_valid_episode_submission(episode)):
                    new_record = RepeatRecord(
                        domain=domain,
                        next_check=datetime.utcnow(),
                        repeater_id=dest_repeater_id,
                        repeater_type=dest_repeater.doc_type,
                        payload_id=record.payload_id,
                    )
                    if not self.dry_run:
                        new_record.save()
                    retriggered.add(record.payload_id)
                    self.add_row(
                        record,
                        episode_case_properties.get('migration_created_case'),
                        new_record.get_id)
                else:
                    self.record_failure(record.get_id,
                                        record.payload_id,
                                        error_message="Not to be re-triggered")
            except Exception as e:
                self.record_failure(record.get_id,
                                    record.payload_id,
                                    error_message="{error}: {message}".format(
                                        error=e.__class__.__name__, message=str(e)))
def iter_chunks(model_class, field, domain, chunk_size=5000):
    where = Q(domain=domain)
    row_count = estimate_partitioned_row_count(model_class, where)
    rows = paginate_query_across_partitioned_databases(
        model_class,
        where,
        values=[field],
        load_source='couch_to_sql_migration',
        query_size=chunk_size,
    )
    values = (r[0] for r in rows)
    values = with_progress_bar(values, row_count, oneline="concise")
    yield from chunked(values, chunk_size, list)
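Consuming `iter_chunks` might look like the sketch below; the model, domain, and batch handler are illustrative. The progress bar advances as the loop drains the underlying generator, using the estimated row count as its denominator:

for case_ids in iter_chunks(CommCareCaseSQL, 'case_id', 'example-domain'):
    # each chunk is a list of up to chunk_size (5000) values of `field`
    handle_case_id_batch(case_ids)  # hypothetical batch handler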
 def _remove_ledger_transactions(self):
     with open("ledger_transactions_removed_case_ids.txt", "w+b") as case_ids_log:
         forms_iterated = 0
         for xform in with_progress_bar(self.forms):
             forms_iterated += 1
             if forms_iterated % 100 == 0:
                 print("traversed %s forms" % forms_iterated)
             ledger_case_ids = get_case_ids_from_stock_transactions(xform)
             if ledger_case_ids:
                 ledger_case_ids = list(ledger_case_ids)
                 for ledger_case_id in ledger_case_ids:
                     case_ids_log.write("%s\n" % ledger_case_id)
                 LedgerAccessorSQL.delete_ledger_transactions_for_form(ledger_case_ids, xform.form_id)
Example #40
 def handle(self, domain, user_id, **options):
     get_forms = XFormInstance.objects.get_forms
     form_ids = XFormInstance.objects.get_form_ids_for_user(domain, user_id)
     print("Found %s forms for user" % len(form_ids))
     response = input(
         "Are you sure you want to archive them? (yes to proceed)")
     if response == 'yes':
         with open("archived_forms_for_user_%s.txt" % user_id, 'wb') as log:
             for ids in chunked(with_progress_bar(form_ids), 100):
                 ids = [f for f in ids if f]
                 for form in get_forms(ids, domain):
                     log.write(form.form_id + '\n')
                     form.archive()
Example #41
    def handle(self, **kwargs):
        domains = kwargs['domains'].split(',')

        for domain in domains:
            print("Resync all contacts' phone numbers for project %s  " %
                  domain)
            print("Synching for phone numbers")
            commcare_user_ids = (
                CommCareUser.ids_by_domain(domain, is_active=True) +
                CommCareUser.ids_by_domain(domain, is_active=False))
            for user_id in with_progress_bar(commcare_user_ids):
                sms_sync_user_phone_numbers.delay(user_id)
            self.sync_cases(domain)
Example #42
    def handle(self, file_path, **options):
        with open(file_path, 'r') as f:
            form_ids = [line.strip() for line in f.readlines()]

        output_path = f"case_ids_{datetime.utcnow().strftime('%Y-%m-%dT%H-%M-%S', )}.csv"
        print(f"Writing data to {output_path}")
        with open(output_path, 'w') as out:
            writer = csv.writer(out)
            writer.writerow(["form_id", "case_id"])
            for form_id, case_ids in _get_case_ids(
                    with_progress_bar(form_ids)):
                for case_id in case_ids:
                    writer.writerow([form_id, case_id])
Example #43
 def handle(self, **options):
     print('Generating data dictionary for domains')
     failed_domains = []
     for domain_dict in with_progress_bar(
             Domain.get_all(include_docs=False)):
         domain = domain_dict['key']
         try:
             generate_data_dictionary(domain)
         except OldExportsEnabledException:
             failed_domains.append(domain)
     print('--- Failed Domains ---')
     for domain in failed_domains:
         print(domain)
Example #45
 def handle(self, dry_run=False, *args, **options):
     self.dry_run = dry_run
     self.reports_using_transform = set()
     report_ids = get_doc_ids_by_class(ReportConfiguration)
     res = iter_update(
         db=ReportConfiguration.get_db(),
         fn=self.migrate_report,
         ids=with_progress_bar(report_ids),
         verbose=True,
     )
     print "Found {} reports using the transform:".format(len(self.reports_using_transform))
     print "\n".join(self.reports_using_transform)
     print "Updated the following reports:"
     print "\n".join(res.updated_ids)
Example #46
 def with_progress(self, doc_diffs, statedb, select):
     counts = statedb.get_doc_counts()
     if "doc_ids" in select:
         count = len(select["doc_ids"])
     elif "by_kind" in select:
         count = sum(len(v) for v in select["by_kind"].values() if v)
     elif select:
         count = counts.get(select["kind"], Counts())
         count = count.changes if self.changes else count.diffs
     else:
         count = sum(c.changes if self.changes else c.diffs
                     for c in counts.values())
     return with_progress_bar(
         doc_diffs, count, "Docs", oneline=False, stream=sys.stderr)
Example #47
 def _update_cases(self, case_ids_with_invalid_phone_number):
     exceptions_raised = 0
     with open(
             'invalid_phone_numbers_with_91_part_%s_updated.csv' %
             self.db_alias, 'w') as output:
         writer = csv.writer(output)
         writer.writerow(['Case Id'])
         case_ids_to_update_chunk = list(
             chunked(case_ids_with_invalid_phone_number, 100))
         for chunk in with_progress_bar(case_ids_to_update_chunk):
             case_ids_list = self._reassured_case_ids_to_update(chunk)
              for case_id in case_ids_list:
                  writer.writerow([case_id])
             exceptions_raised = self._submit_update_form(
                 case_ids_list, exceptions_raised)
    def handle(self, *args, **options):
        self.verbose = options["verbosity"] > 1
        timeout = options["connect_timeout"]
        castore = options["ca_bundle"]

        # sanity-check options
        if timeout < 0:
            raise CommandError(f"Invalid timeout value: {timeout}")
        if castore is not None and not os.path.isfile(castore):
            raise CommandError(f"Invalid CA store file: {castore}")

        # determine which verification method to use
        if options["ssl_only"]:
            iter_fails = self.verify_ssl_domains
        else:
            iter_fails = self.verify_connections

        failures = []
        conn_generator = with_progress_bar(
            list(ConnectionSettings.objects.order_by("url")),
            oneline=(not self.verbose),
            stream=self.stderr._out,  # OutputWrapper.write() does not play nice
        )
        request_kw = {
            "verify": (castore or True),
            "timeout": timeout,
        }
        for conn, exc in iter_fails(conn_generator, request_kw):
            code = getattr(getattr(exc, "response", None), "status_code",
                           "err")
            failures.append((conn, code, exc.__class__.__name__, str(exc)))

        if failures:
            csv_rows = [[
                "domain",
                "setting name",
                "url",
                "error type",
                "error message",
            ]]
            self.console(f"ERROR: {len(failures)} failure(s):",
                         self.style.ERROR)
            for conn, code, err, msg in failures:
                self.console(f"FAIL [{code}]: {conn.url}")
                csv_rows.append([conn.domain, conn.name, conn.url, err, msg])
            # write this last to keep logging separate (in case STDOUT is used)
            writer = csv.writer(options["csv_out"])
            writer.writerows(csv_rows)
 def fix_apps(unique_id_to_xmlns_map, app_to_unique_ids_map, log_file, dry_run):
     for (app_id, domain), form_unique_ids in with_progress_bar(list(app_to_unique_ids_map.items())):
         app = get_app(domain, app_id)
         for build in [app] + get_saved_apps(app):
             for form_unique_id in form_unique_ids:
                 if unique_id_to_xmlns_map.get(form_unique_id):
                     set_xmlns_on_form(
                         form_unique_id,
                         unique_id_to_xmlns_map[form_unique_id],
                         build,
                         log_file,
                         dry_run
                     )
                 else:
                     print('Could not find unique_id {} in build {}'.format(form_unique_id, build._id))
Example #50
    def _with_progress(self, doc_types, iterable, progress_name='Migrating'):
        doc_count = sum([
            get_doc_count_in_domain_by_type(self.domain, doc_type, XFormInstance.get_db())
            for doc_type in doc_types
        ])
        if self.timing_context:
            current_timer = self.timing_context.peek()
            current_timer.normalize_denominator = doc_count

        if self.with_progress:
            prefix = "{} ({})".format(progress_name, ', '.join(doc_types))
            return with_progress_bar(iterable, doc_count, prefix=prefix, oneline=False)
        else:
            self.log_info("{} ({})".format(doc_count, ', '.join(doc_types)))
            return iterable
    def handle(self, month_year, file_path, **options):
        month_year_parsed = dateutil.parser.parse('1-' + month_year)
        start_date = month_year_parsed.replace(day=1)
        end_date = start_date + relativedelta(day=1, months=+1, microseconds=-1)

        with open(file_path, 'w', encoding='utf-8') as file_object:  # csv needs text mode on Python 3
            writer = csv.writer(file_object)
            writer.writerow([
                'domain name',
                'user id',
                'total number of forms submitted in a month',
                'used case management',
                'multiple form types'
            ])

            for domain in with_progress_bar(Domain.get_all(include_docs=False)):
                domain_name = domain['key']
                user_ids = CommCareUser.ids_by_domain(domain=domain_name)
                for users in chunked(user_ids, 100):
                    forms = get_forms_for_users(domain_name, users, start_date, end_date)
                    user_dict = defaultdict(list)
                    for form in forms:
                        user_id = form['form']['meta']['userID']
                        user_dict[user_id].append(form)
                    for user_id, forms in six.iteritems(user_dict):
                        has_two_forms_submitted = False
                        has_case = False
                        unique_forms = set()
                        for form in forms:
                            if has_case and has_two_forms_submitted:
                                break
                            if not has_case and form.get('form', {}).get('case'):
                                has_case = True
                            if not has_two_forms_submitted:
                                xmlns = form.get('form', {}).get('@xmlns')
                                if xmlns:
                                    unique_forms.add(xmlns)
                                    if len(unique_forms) >= 2:
                                        has_two_forms_submitted = True
                        writer.writerow([
                            domain_name,
                            user_id,
                            len(forms),
                            has_case,
                            has_two_forms_submitted
                        ])
    def handle(self, domain, **options):
        dryrun = options["dryrun"]
        verbose = options["verbose"] or dryrun

        mode = options['mode']
        if mode == 'stats':
            self.print_stats()
            return

        batch_size = {
            'single': 1,
            'batch': options['batch_size'],
            'all': None
        }[mode]

        if verbose and batch_size:
            root_logger = logging.getLogger('')
            root_logger.setLevel(logging.DEBUG)

        if not batch_size:
            if dryrun:
                raise CommandError('Dry run only for single / batch modes')
            total = UnfinishedSubmissionStub.objects.count()
            stub_iterator = with_progress_bar(UnfinishedSubmissionStub.objects.all(), total, oneline=False)
            for stub in stub_iterator:
                reprocess_unfinished_stub(stub)
        else:
            paginator = Paginator(UnfinishedSubmissionStub.objects.all(), batch_size)
            for page_number in paginator.page_range:
                page = paginator.page(page_number)
                for stub in page.object_list:
                    result = reprocess_unfinished_stub(stub, save=not dryrun)
                    if result:
                        cases = ', '.join(c.case_id for c in result.cases)
                        ledgers = ', '.join(str(ledger.ledger_reference) for ledger in result.ledgers)
                        logger.info("Form re-processed successfully: {}:{} (cases: {}; ledgers: {})".format(
                            result.form.domain, result.form.form_id, cases, ledgers
                        ))
                if not page.has_next():
                    print("All forms processed")
                elif not confirm():
                    break
Example #53
    def dump_to_file(self):
        try:
            doc_count = XFormInstance.get_db().view(
                'couchforms/by_xmlns',
                key=DEVICE_LOG_XMLNS,
                reduce=True,
            ).one()['value']
        except TypeError:
            doc_count = 0

        device_log_ids = [row['id'] for row in XFormInstance.get_db().view(
            'couchforms/by_xmlns',
            key=DEVICE_LOG_XMLNS,
            reduce=False,
        )]

        with open(self.filename, 'w') as f:
            device_log_docs = iter_docs_with_retry(XFormInstance.get_db(), device_log_ids)
            for doc in with_progress_bar(device_log_docs, length=doc_count):
                f.write(json.dumps(doc) + '\n')
def perform_resave_on_xforms(domain, start_date, end_date, no_input):
    _, _, xform_ids_missing_in_es, _ = compare_xforms(domain, 'XFormInstance', start_date, end_date)
    print("%s Ids found for xforms missing in ES." % len(xform_ids_missing_in_es))
    if len(xform_ids_missing_in_es) < 1000:
        print(xform_ids_missing_in_es)
    if no_input is not True:
        ok = input("Type 'ok' to continue: ")
        if ok != "ok":
            print("No changes made")
            return
    form_accessor = FormAccessors(domain)
    for xform_ids in chunked(with_progress_bar(xform_ids_missing_in_es), 100):
        xforms = form_accessor.get_forms(list(xform_ids))
        found_xform_ids = set()

        for xform in xforms:
            resave_form(domain, xform)
            found_xform_ids.add(xform.form_id)

        for xform_id in set(xform_ids) - found_xform_ids:
            print("form not found %s" % xform_id)
def perform_resave_on_cases(domain, start_date, end_date, no_input):
    _, _, case_ids_missing_in_es, _ = compare_cases(domain, 'CommCareCase', start_date, end_date)
    print("%s Ids found for cases missing in ES." % len(case_ids_missing_in_es))
    if len(case_ids_missing_in_es) < 1000:
        print(case_ids_missing_in_es)
    if no_input is not True:
        ok = input("Type 'ok' to continue: ")
        if ok != "ok":
            print("No changes made")
            return
    case_accessor = CaseAccessors(domain)
    for case_ids in chunked(with_progress_bar(case_ids_missing_in_es), 100):
        cases = case_accessor.get_cases(list(case_ids))
        found_case_ids = set()

        for case in cases:
            resave_case(domain, case, send_post_save_signal=False)
            found_case_ids.add(case.case_id)

        for case_id in set(case_ids) - found_case_ids:
            print("case not found %s" % case_id)
Example #56
def migrate_domain(domain, dryrun=False, force_convert_columns=False):
    from couchexport.models import SavedExportSchema
    export_count = stale_get_export_count(domain)
    metas = []
    if export_count:
        for old_export in with_progress_bar(
                stale_get_exports_json(domain),
                length=export_count,
                prefix=domain):
            try:
                _, migration_meta = convert_saved_export_to_export_instance(
                    domain,
                    SavedExportSchema.wrap(old_export),
                    dryrun=dryrun,
                    force_convert_columns=force_convert_columns,
                )
            except Exception as e:
                print('Failed parsing {}: {}'.format(old_export['_id'], e))
                raise
            else:
                metas.append(migration_meta)
Example #57
    def handle(self, domains, file_name, **options):
        blob_db = get_blob_db()

        with open(file_name, 'w', encoding='utf-8') as csv_file:
            field_names = ['domain', 'archived', 'form_id', 'received_on']
            csv_writer = csv.DictWriter(csv_file, field_names)
            csv_writer.writeheader()
            for domain in domains:
                self.stdout.write("Handling domain %s" % domain)
                form_db = FormAccessors(domain)
                form_ids = form_db.get_all_form_ids_in_domain()
                form_ids.extend(form_db.get_all_form_ids_in_domain('XFormArchived'))
                for form in with_progress_bar(form_db.iter_forms(form_ids), len(form_ids)):
                    if isinstance(form, CouchForm):
                        meta = form.blobs.get(ATTACHMENT_NAME)
                        if not meta or not blob_db.exists(key=meta.key):
                            self.write_row(csv_writer, domain, form.is_archived, form.received_on, form.form_id)
                    elif isinstance(form, XFormInstanceSQL):
                        meta = form.get_attachment_meta(ATTACHMENT_NAME)
                        if not meta or not blob_db.exists(key=meta.key):
                            self.write_row(csv_writer, domain, form.is_archived, form.received_on, form.form_id)
                    else:
                        raise Exception("not sure how we got here")
 def handle(self, **options):
     self.options = options
     user_ids = with_progress_bar(self.get_user_ids())
     iter_update(CouchUser.get_db(), self.migrate_user, user_ids, verbose=True)
def clean_users():
    all_ids = with_progress_bar(get_bad_user_ids())
    iter_update(CommCareUser.get_db(), clean_user, all_ids, verbose=True)