def delete_already_successful_records(self, redundant_records):
    """Delete repeat records whose payloads were already sent.

    Returns an audit log of one tuple per deleted record:
    (record id, payload id, failure reason, 'Yes', 'Already Sent').
    """
    deleted_rows = []
    with IterDB(RepeatRecord.get_db()) as deleter:
        for rec in redundant_records:
            deleter.delete(rec)
            deleted_rows.append(
                (rec._id, rec.payload_id, rec.failure_reason, 'Yes', 'Already Sent')
            )
    return deleted_rows
def handle(self, *args, **options):
    """Print a tab-separated, per-domain summary of repeater and repeat-record counts."""
    write = self.stdout.write

    write("\n")
    write('fetching repeater data...')
    repeater_rows = Repeater.get_db().view(
        'repeaters/repeaters', group_level=1, reduce=True).all()
    # key is [domain, ...]; value is the reduced count for that domain
    repeaters_by_domain = {row['key'][0]: row['value'] for row in repeater_rows}

    write("\n")
    write('fetching repeat record data...')
    record_rows = RepeatRecord.get_db().view(
        'repeaters/repeat_records', group_level=1, reduce=True).all()

    write("\n\n\n")
    write("Domain\tRepeaters\tRepeatRecords")
    for row in record_rows:
        domain = row['key'][0]
        num_repeaters = repeaters_by_domain.get(domain, 0)
        num_repeat_records = row['value']
        write(f'{domain}\t{num_repeaters}\t{num_repeat_records}')
    write('*' * 230)
    write('done...')
def resolve_duplicates(self, records_by_payload_id):
    """Keep the first record for each payload id and delete the rest.

    Returns an audit log of tuples:
    (record id, payload id, failure reason, deleted?, deletion reason).
    The kept record gets ('No', ''); each duplicate gets ('Yes', 'Duplicate').
    """
    audit_rows = []
    with IterDB(RepeatRecord.get_db()) as deleter:
        for payload_id, records in records_by_payload_id.items():
            keeper = records[0]
            audit_rows.append(
                (keeper._id, payload_id, keeper.failure_reason, 'No', ''))
            # Everything after the first record is a duplicate to remove.
            for dupe in records[1:]:
                deleter.delete(dupe)
                audit_rows.append(
                    (dupe._id, payload_id, dupe.failure_reason, 'Yes', 'Duplicate'))
    return audit_rows
def get_repeat_records_by_payload_id(domain, payload_id):
    """Return all RepeatRecords in `domain` with the given payload id."""
    from .models import RepeatRecord
    key = [domain, payload_id]
    rows = RepeatRecord.get_db().view(
        'repeaters/repeat_records_by_payload_id',
        startkey=key,
        endkey=key,
        include_docs=True,
        reduce=False,
        descending=True,
    ).all()
    return [RepeatRecord.wrap(row['doc']) for row in rows]
def delete_all_repeat_records():
    """Delete every RepeatRecord in the database, best-effort.

    Records that cannot be fetched are skipped silently.
    """
    from .models import RepeatRecord
    rows = RepeatRecord.get_db().view('repeaters/repeat_records', reduce=False).all()
    for row in rows:
        try:
            record = RepeatRecord.get(row['id'])
        except Exception:
            # NOTE(review): broad catch skips any record that fails to load
            # (presumably already-deleted docs); consider narrowing the
            # exception type — confirm which errors `get` can raise here.
            continue
        record.delete()
def reconcile_repeat_records(self, voucher_updates):
    """
    Mark updated records as "succeeded", all others as "cancelled"
    Delete duplicate records if any exist

    Writes an audit CSV ('repeat_records') with one row per record:
    [record_id, voucher_id, status] where status is one of
    "deleted" / "succeeded" / "cancelled".

    Fix: converted Python 2 `print` statements to `print()` calls for
    consistency with the rest of the codebase (which uses f-strings).
    """
    print("Reconciling repeat records")
    chemist_voucher_repeater_id = 'be435d3f407bfb1016cc89ebbf8146b1'
    lab_voucher_repeater_id = 'be435d3f407bfb1016cc89ebbfc42a47'
    already_seen = set()
    updates_by_voucher_id = {update.id: update for update in voucher_updates}
    headers = ['record_id', 'voucher_id', 'status']
    rows = []
    # Only touch the database when committing; otherwise a MagicMock
    # swallows the save/delete calls so this is a dry run.
    get_db = (lambda: IterDB(RepeatRecord.get_db())) if self.commit else MagicMock
    with get_db() as iter_db:
        for repeater_id in [chemist_voucher_repeater_id, lab_voucher_repeater_id]:
            print("repeater {}".format(repeater_id))
            records = iter_repeat_records_by_domain(self.domain, repeater_id=repeater_id)
            record_count = get_repeat_record_count(self.domain, repeater_id=repeater_id)
            for record in with_progress_bar(records, record_count):
                if record.payload_id in already_seen:
                    # Duplicate payload: delete the extra record.
                    status = "deleted"
                    iter_db.delete(record)
                elif record.payload_id in updates_by_voucher_id:
                    # add successful attempt
                    status = "succeeded"
                    attempt = RepeatRecordAttempt(
                        cancelled=False,
                        datetime=datetime.datetime.utcnow(),
                        failure_reason=None,
                        success_response="Paid offline via import_voucher_confirmations",
                        next_check=None,
                        succeeded=True,
                    )
                    record.add_attempt(attempt)
                    iter_db.save(record)
                else:
                    # mark record as canceled
                    record.add_attempt(RepeatRecordAttempt(
                        cancelled=True,
                        datetime=datetime.datetime.utcnow(),
                        failure_reason="Cancelled during import_voucher_confirmations",
                        success_response=None,
                        next_check=None,
                        succeeded=False,
                    ))
                    iter_db.save(record)
                already_seen.add(record.payload_id)
                rows.append([record._id, record.payload_id, status])
    self.write_csv('repeat_records', headers, rows)
def iter_repeat_records_by_repeater(domain, repeater_id, chunk_size=1000):
    """Lazily yield RepeatRecords for one repeater in `domain`.

    `chunk_size` controls the page size of the underlying view query only;
    it does not change what is yielded.
    """
    from corehq.motech.repeaters.models import RepeatRecord
    view_kwargs = dict(
        include_docs=True,
        reduce=False,
        descending=True,
        **_get_startkey_endkey_all_records(domain, repeater_id)
    )
    paginated = paginate_view(
        RepeatRecord.get_db(),
        'repeaters/repeat_records',
        chunk_size,
        **view_kwargs)
    for row in paginated:
        yield RepeatRecord.wrap(row['doc'])
def iterate_repeat_records(due_before, chunk_size=10000, database=None):
    """Lazily yield RepeatRecords whose next check is due on or before `due_before`.

    :param due_before: datetime cutoff; records due at or before this are yielded
    :param chunk_size: page size for the underlying view query (does not
        change what is yielded)
    :param database: optional Couch database to query; defaults to
        RepeatRecord's own database

    Fix: the `database` parameter was previously accepted but silently
    ignored — the function always queried RepeatRecord.get_db(). It is now
    honored, with the default behavior unchanged.
    """
    from .models import RepeatRecord
    json_now = json_format_datetime(due_before)
    view_kwargs = {
        'reduce': False,
        'startkey': [None],
        'endkey': [None, json_now, {}],
        'include_docs': True
    }
    db = database if database is not None else RepeatRecord.get_db()
    for doc in paginate_view(
            db,
            'repeaters/repeat_records_by_next_check',
            chunk_size,
            **view_kwargs):
        yield RepeatRecord.wrap(doc['doc'])
def _get_ids(data, domain): if not data: return [] if data.get('payload_id', None): results = get_repeat_records_by_payload_id(domain, data['payload_id']) else: from corehq.motech.repeaters.models import RepeatRecord kwargs = { 'include_docs': True, 'reduce': False, 'descending': True, } kwargs.update(_get_startkey_endkey_all_records(domain, data['repeater'])) results = RepeatRecord.get_db().view('repeaters/repeat_records', **kwargs).all() ids = [x['id'] for x in results] return ids
def iterate_repeat_record_ids(due_before, chunk_size=10000):
    """
    Yields repeat record ids only.

    Use chunk_size to optimize db query. Has no effect on # of items returned.
    """
    from .models import RepeatRecord
    cutoff = json_format_datetime(due_before)
    paginated = paginate_view(
        RepeatRecord.get_db(),
        'repeaters/repeat_records_by_next_check',
        chunk_size,
        reduce=False,
        startkey=[None],
        endkey=[None, cutoff, {}],
        include_docs=False,
    )
    for row in paginated:
        yield row['id']
def iterate_repeat_records_for_ids(doc_ids):
    """Return a generator of RepeatRecords wrapped from the docs for `doc_ids`."""
    from .models import RepeatRecord
    db = RepeatRecord.get_db()
    return (RepeatRecord.wrap(doc) for doc in iter_docs(db, doc_ids))