def test_clone_repeaters(self):
    """Cloning a domain with include=['repeaters'] copies its repeaters."""
    from corehq.motech.repeaters.models import Repeater
    from corehq.motech.repeaters.models import CaseRepeater
    from corehq.motech.repeaters.models import FormRepeater

    # The freshly created domain starts with no repeaters.
    self.assertEqual(0, len(Repeater.by_domain(self.new_domain)))

    # Create one repeater of each type in the source domain.
    for repeater in (
        CaseRepeater(domain=self.old_domain, url='case-repeater-url'),
        FormRepeater(domain=self.old_domain, url='form-repeater-url'),
    ):
        repeater.save()
        self.addCleanup(repeater.delete)

    self.make_clone(include=['repeaters'])

    # Both repeaters should have been copied into the new domain.
    cloned_repeaters = Repeater.by_domain(self.new_domain)
    self.assertEqual(2, len(cloned_repeaters))
    self.assertEqual(
        {'CaseRepeater', 'FormRepeater'},
        {repeater.doc_type for repeater in cloned_repeaters},
    )
def migrate_auth_field(apps, schema_editor):
    """Apply ``migrate_repeater`` to every repeater document in Couch."""
    # Fetch only document ids; iter_update loads and rewrites the docs itself.
    view_rows = Repeater.view(
        'repeaters/repeaters',
        include_docs=False,
        reduce=False,
        wrap_doc=False,
    )
    iter_update(
        db=Repeater.get_db(),
        fn=migrate_repeater,
        ids=[row['id'] for row in view_rows],
    )
def migrate_auth_field(apps, schema_editor):
    """Run ``migrate_repeater`` over every repeater doc via ``iter_update``."""
    # ids only (include_docs=False): iter_update re-fetches each doc itself.
    rows = Repeater.view(
        'repeaters/repeaters',
        include_docs=False,
        reduce=False,
        wrap_doc=False,
    )
    repeater_ids = [row['id'] for row in rows]
    iter_update(db=Repeater.get_db(), fn=migrate_repeater, ids=repeater_ids)
def send_repeater_payloads(repeater_id, payload_ids, email_id):
    """Generate the repeater's payload for each payload id, write them all to
    a CSV file, and email a download link to ``email_id``.

    Payloads that cannot be generated get a row with a 'note' column
    explaining the failure instead of aborting the whole export.
    """
    from corehq.motech.repeaters.models import Repeater, RepeatRecord
    repeater = Repeater.get(repeater_id)
    repeater_type = repeater.doc_type
    payloads = dict()
    headers = ['note']
    result_file_name = "bulk-payloads-%s-%s-%s.csv" % (
        repeater.doc_type, repeater.get_id,
        datetime.datetime.utcnow().strftime("%Y-%m-%d--%H-%M-%S"))

    def get_payload(payload_id):
        # A transient RepeatRecord lets the repeater build its payload
        # without persisting anything to the database.
        dummy_repeat_record = RepeatRecord(
            domain=repeater.domain,
            next_check=datetime.datetime.utcnow(),
            repeater_id=repeater.get_id,
            repeater_type=repeater_type,
            payload_id=payload_id,
        )
        payload = repeater.get_payload(dummy_repeat_record)
        if isinstance(payload, dict):
            return payload
        else:
            return json.loads(payload)

    def populate_payloads(headers):
        for payload_id in payload_ids:
            try:
                payload = get_payload(payload_id)
                payloads[payload_id] = payload
                # BUG FIX: ``headers + payload.keys()`` raises TypeError on
                # Python 3 (list + dict_keys is unsupported) — convert the
                # keys to a list before concatenating.
                headers = list(set(headers + list(payload)))
            except Exception as e:
                payloads[payload_id] = {
                    'note': 'Could not generate payload, %s' % str(e)
                }
        return headers

    def create_result_file():
        _, temp_file_path = tempfile.mkstemp()
        with open(temp_file_path, 'w') as csvfile:
            headers.append('payload_id')
            writer = csv.DictWriter(csvfile, fieldnames=headers)
            writer.writeheader()
            for payload_id, payload in payloads.items():
                row = payload
                row['payload_id'] = payload_id
                writer.writerow(row)
        return temp_file_path

    def email_result(download_url):
        send_HTML_email(
            'Bulk Payload generated for %s' % repeater_type,
            email_id,
            'This email is to just let you know that there is a '
            'download waiting for you at %s. It will expire in 24 hours' % download_url)

    headers = populate_payloads(headers)
    temp_file_path = create_result_file()
    download_url = ExposeBlobDownload().get_link(temp_file_path, result_file_name, Format.CSV)
    email_result(download_url)
def obj_update(self, bundle, **kwargs):
    """Update an existing repeater, refusing cross-domain modification."""
    requested_domain = kwargs['domain']
    bundle.obj = Repeater.get(kwargs['pk'])
    # Guard before and after the update so a request payload cannot move
    # the repeater into another domain.
    assert bundle.obj.domain == requested_domain
    bundle = self._update(bundle)
    assert bundle.obj.domain == requested_domain
    bundle.obj.save()
    return bundle
def obj_update(self, bundle, **kwargs):
    """Apply an update to a repeater while pinning it to its original domain."""
    repeater = Repeater.get(kwargs['pk'])
    # The repeater being edited must already belong to the URL's domain.
    assert repeater.domain == kwargs['domain']
    bundle.obj = repeater
    bundle = self._update(bundle)
    # Re-check after _update in case the payload tried to change the domain.
    assert bundle.obj.domain == kwargs['domain']
    bundle.obj.save()
    return bundle
def handle(self, *args, **options):
    """Print a tab-separated per-domain summary of repeaters and repeat records."""
    write = self.stdout.write

    write("\n")
    write('fetching repeater data...')
    repeater_summary = Repeater.get_db().view(
        'repeaters/repeaters', group_level=1, reduce=True).all()
    # Reduced view keyed by domain -> count of repeaters.
    repeaters_by_domain = {
        info['key'][0]: info['value']
        for info in repeater_summary
    }

    write("\n")
    write('fetching repeat record data...')
    repeat_records_summary = RepeatRecord.get_db().view(
        'repeaters/repeat_records', group_level=1, reduce=True).all()

    write("\n\n\n")
    write("Domain\tRepeaters\tRepeatRecords")
    for info in repeat_records_summary:
        domain = info['key'][0]
        num_repeaters = repeaters_by_domain.get(domain, 0)
        num_repeat_records = info['value']
        write(f'{domain}\t{num_repeaters}\t{num_repeat_records}')
    write('*' * 230)
    write('done...')
def iter_repeaters():
    """Yield every Repeater in Couch, purging zombie docs of deleted classes."""
    results = Repeater.get_db().view(
        'repeaters/repeaters', reduce=False, include_docs=True).all()
    for result in results:
        doc = result['doc']
        try:
            repeater = Repeater.wrap(doc)
        except ResourceNotFound:
            if doc['doc_type'] not in DELETED_REPEATER_CLASSES:
                raise
            # repeater is an instance of a class that has been deleted
            # from the codebase. It is safe to delete because it does
            # not have repeat records waiting to be sent, and no future
            # repeat records will be created for it.
            delete_zombie_repeater_instance(doc)
            continue
        yield repeater
def setUpClass(cls) -> None:
    """Create a ConnectionSettings row and Couch-only copies of the test repeaters."""
    cls.conn = ConnectionSettings(url="http://url.com", domain='rtest')
    cls.conn.save()
    cls.couch_repeaters = []
    for data in deepcopy(repeater_test_data):
        repeater = Repeater.wrap(data)
        repeater.connection_settings_id = cls.conn.id
        # Save to Couch only; deliberately skip the SQL sync for these fixtures.
        repeater.save(sync_to_sql=False)
        cls.couch_repeaters.append(repeater)
    return super().setUpClass()
def test_clone_repeaters(self):
    """Cloning with include=['repeaters'] copies standard and custom repeaters."""
    from corehq.motech.repeaters.models import Repeater
    from corehq.motech.repeaters.models import CaseRepeater
    from corehq.motech.repeaters.models import FormRepeater
    from custom.enikshay.integrations.nikshay.repeaters import NikshayRegisterPatientRepeater

    # Both lookups start out empty for the new domain.
    self.assertEqual(0, len(Repeater.by_domain(self.new_domain)))
    self.assertEqual(
        0, len(NikshayRegisterPatientRepeater.by_domain(self.new_domain)))

    # One repeater of each type in the source domain, including a custom one.
    for repeater in (
        CaseRepeater(domain=self.old_domain, url='case-repeater-url'),
        FormRepeater(domain=self.old_domain, url='form-repeater-url'),
        NikshayRegisterPatientRepeater(domain=self.old_domain, url='99dots'),
    ):
        repeater.save()
        self.addCleanup(repeater.delete)

    self.make_clone(include=['repeaters'])

    cloned_repeaters = Repeater.by_domain(self.new_domain)
    self.assertEqual(3, len(cloned_repeaters))
    self.assertEqual(
        {'CaseRepeater', 'FormRepeater', 'NikshayRegisterPatientRepeater'},
        {repeater.doc_type for repeater in cloned_repeaters})

    # test cache clearing
    cloned_niksay_repeaters = NikshayRegisterPatientRepeater.by_domain(
        self.new_domain)
    self.assertEqual(1, len(cloned_niksay_repeaters))
def create_repeat_records_on_dest_repeater(self, source_repeater_id, dest_repeater_id, state):
    """Re-register qualifying episode payloads from one repeater onto another.

    Iterates the source repeater's repeat records in ``state`` and, for each
    episode case still awaiting Nikshay registration, creates a new
    RepeatRecord targeting the destination repeater (skipped when
    ``self.dry_run``). Skips and errors are logged via ``self.record_failure``;
    successes via ``self.add_row``.
    """
    dest_repeater = Repeater.get(dest_repeater_id)
    retriggered = set()  # payload ids already re-registered during this run
    # NOTE(review): ``domain`` is not a parameter here — it must resolve to a
    # module-level name; confirm before reusing this method elsewhere.
    records = iter_repeat_records_by_domain(domain, repeater_id=source_repeater_id, state=state)
    record_count = get_repeat_record_count(domain, repeater_id=source_repeater_id, state=state)
    accessor = CaseAccessors(domain)
    print("Iterating over records and adding new record for them")
    for record in with_progress_bar(records, length=record_count):
        if record.payload_id in retriggered:
            self.record_failure(record.get_id, record.payload_id,
                                error_message="Already triggered")
            continue
        try:
            episode = accessor.get_case(record.payload_id)
            episode_case_properties = episode.dynamic_case_properties()
            # Only re-trigger confirmed-TB episodes that have not yet been
            # registered with Nikshay through any channel.
            if (episode_case_properties.get('nikshay_registered', 'false') == 'false'
                    and episode_case_properties.get('private_nikshay_registered', 'false') == 'false'
                    and not episode_case_properties.get('nikshay_id')
                    and episode_case_properties.get('episode_type') == 'confirmed_tb'
                    and is_valid_episode_submission(episode)):
                new_record = RepeatRecord(
                    domain=domain,
                    next_check=datetime.utcnow(),
                    repeater_id=dest_repeater_id,
                    repeater_type=dest_repeater.doc_type,
                    payload_id=record.payload_id,
                )
                if not self.dry_run:
                    new_record.save()
                retriggered.add(record.payload_id)
                self.add_row(record,
                             episode_case_properties.get('migration_created_case'),
                             new_record.get_id)
            else:
                self.record_failure(record.get_id, record.payload_id,
                                    error_message="Not to be re-triggered")
        except Exception as e:
            # BUG FIX: exception instances have no ``__name__`` attribute and
            # (on Python 3) no ``.message``; the original line raised
            # AttributeError inside this handler, masking the real error.
            self.record_failure(record.get_id, record.payload_id,
                                error_message="{error}: {message}".format(
                                    error=type(e).__name__, message=str(e)))
def handle(self, domain, repeater_id, *args, **options):
    """Find a repeater's cancelled repeat records, identify those made
    redundant by a later success for the same payload, and (after
    confirmation) delete redundant/duplicate records, logging every change
    to a CSV file.
    """
    self.domain = domain
    self.repeater_id = repeater_id
    repeater = Repeater.get(repeater_id)
    print("Looking up repeat records for '{}'".format(repeater.friendly_name))

    redundant_records = []
    records_by_payload_id = defaultdict(list)
    records = iter_repeat_records_by_domain(domain,
                                            repeater_id=repeater_id,
                                            state=RECORD_CANCELLED_STATE)
    total_records = 0
    for record in records:
        total_records += 1
        most_recent_success = self.most_recent_success.get(record.payload_id)
        if most_recent_success and record.last_checked < most_recent_success:
            # another record with this payload has succeeded after this record failed
            redundant_records.append(record)
        else:
            records_by_payload_id[record.payload_id].append(record)

    unique_payloads = len(records_by_payload_id)
    redundant_payloads = len(redundant_records)
    print(
        "There are {total} total cancelled records, {redundant} with payloads which "
        "have since succeeded, and {unique} unsent unique payload ids.".format(
            total=total_records,
            redundant=redundant_payloads,
            unique=unique_payloads))
    print("Delete {} duplicate records?".format(total_records - unique_payloads))
    if not input("(y/n)") == 'y':
        print("Aborting")
        return

    redundant_log = self.delete_already_successful_records(redundant_records)
    duplicates_log = self.resolve_duplicates(records_by_payload_id)

    filename = "cancelled_{}_records-{}.csv".format(
        repeater.__class__.__name__, datetime.datetime.utcnow().isoformat())
    print("Writing log of changes to {}".format(filename))
    # FIX: open with an explicit encoding so the log does not depend on the
    # platform's default locale (matches the sibling version of this command).
    with open(filename, 'w', encoding='utf-8') as f:
        writer = csv.writer(f)
        writer.writerow(('RepeatRecord ID', 'Payload ID', 'Failure Reason',
                         'Deleted?', 'Reason'))
        writer.writerows(redundant_log)
        writer.writerows(duplicates_log)
def add_repeater_form(self):
    """Return a bound form on POST, else a form pre-filled from the repeater doc."""
    if self.request.method == 'POST':
        return self.repeater_form_class(
            self.request.POST,
            domain=self.domain,
            repeater_class=self.repeater_class,
        )
    repeater = Repeater.get(self.kwargs['repeater_id'])
    return self.repeater_form_class(
        domain=self.domain,
        repeater_class=self.repeater_class,
        data=repeater.to_json(),
        submit_btn_text=_("Update Repeater"),
    )
def add_repeater_form(self):
    """Build the repeater form; the stored password is masked on GET."""
    common_kwargs = {
        'domain': self.domain,
        'repeater_class': self.repeater_class,
    }
    if self.request.method == 'POST':
        return self.repeater_form_class(self.request.POST, **common_kwargs)
    repeater = Repeater.get(self.kwargs['repeater_id'])
    initial = repeater.to_json()
    # Never echo the real password back to the browser.
    initial['password'] = PASSWORD_PLACEHOLDER
    return self.repeater_form_class(
        data=initial,
        submit_btn_text=_("Update Repeater"),
        **common_kwargs,
    )
def add_repeater_form(self):
    """Return the repeater form: bound to POST data, or pre-populated from
    the stored repeater with the password masked."""
    if self.request.method != 'POST':
        repeater = Repeater.get(self.kwargs['repeater_id'])
        form_data = repeater.to_json()
        # Replace the saved password so it is never rendered in the page.
        form_data['password'] = PASSWORD_PLACEHOLDER
        return self.repeater_form_class(
            domain=self.domain,
            repeater_class=self.repeater_class,
            data=form_data,
            submit_btn_text=_("Update Repeater"),
        )
    return self.repeater_form_class(
        self.request.POST,
        domain=self.domain,
        repeater_class=self.repeater_class,
    )
def __init__(self, *args, **kwargs):
    """Prepare domain, format, and connection choices before building the form."""
    form_data = kwargs.get('data')
    if form_data:
        # Ensure legacy repeaters get a ConnectionSettings record before
        # the form is rendered.
        repeater = Repeater.wrap(form_data)
        if not repeater.connection_settings_id:
            repeater.create_connection_settings()
    self.domain = kwargs.pop('domain')
    self.repeater_class = kwargs.pop('repeater_class')
    self.formats = RegisterGenerator.all_formats_by_repeater(
        self.repeater_class, for_domain=self.domain)
    self.connection_settings_choices = [
        (cs.id, cs.name)
        for cs in ConnectionSettings.objects.filter(domain=self.domain)
    ]
    self.submit_btn_text = kwargs.pop('submit_btn_text', _("Start Forwarding"))
    super(GenericRepeaterForm, self).__init__(*args, **kwargs)
    self.set_extra_django_form_fields()
    self._initialize_crispy_layout()
def handle(self, domain, repeater_id, *args, **options):
    """Partition a repeater's cancelled records into redundant (payload later
    succeeded) vs. unique, then delete the redundant/duplicate ones after a
    confirmation prompt, logging everything to a CSV file."""
    self.domain = domain
    self.repeater_id = repeater_id
    repeater = Repeater.get(repeater_id)
    print("Looking up repeat records for '{}'".format(repeater.friendly_name))

    redundant_records = []
    records_by_payload_id = defaultdict(list)
    cancelled = iter_repeat_records_by_domain(
        domain, repeater_id=repeater_id, state=RECORD_CANCELLED_STATE)
    total_records = 0
    for record in cancelled:
        total_records += 1
        last_success = self.most_recent_success.get(record.payload_id)
        if last_success and record.last_checked < last_success:
            # another record with this payload has succeeded after this record failed
            redundant_records.append(record)
        else:
            records_by_payload_id[record.payload_id].append(record)

    unique_payloads = len(records_by_payload_id)
    redundant_payloads = len(redundant_records)
    print("There are {total} total cancelled records, {redundant} with payloads which "
          "have since succeeded, and {unique} unsent unique payload ids."
          .format(total=total_records,
                  redundant=redundant_payloads,
                  unique=unique_payloads))
    print("Delete {} duplicate records?".format(total_records - unique_payloads))
    if input("(y/n)") != 'y':
        print("Aborting")
        return

    redundant_log = self.delete_already_successful_records(redundant_records)
    duplicates_log = self.resolve_duplicates(records_by_payload_id)

    filename = "cancelled_{}_records-{}.csv".format(
        repeater.__class__.__name__,
        datetime.datetime.utcnow().isoformat())
    print("Writing log of changes to {}".format(filename))
    with open(filename, 'w', encoding='utf-8') as f:
        writer = csv.writer(f)
        writer.writerow(('RepeatRecord ID', 'Payload ID', 'Failure Reason',
                         'Deleted?', 'Reason'))
        writer.writerows(redundant_log)
        writer.writerows(duplicates_log)
def used_by(self):
    """
    Returns the names of kinds of things that are currently using this
    instance. Used for informing users, and determining whether the
    instance can be deleted.
    """
    from corehq.motech.repeaters.models import Repeater

    kinds = set()
    if self.incrementalexport_set.exists():
        kinds.add(_('Incremental Exports'))
    if self.sqldatasetmap_set.exists():
        kinds.add(_('DHIS2 DataSet Maps'))
    domain_repeaters = Repeater.by_domain(self.domain)
    if any(r.connection_settings_id == self.id for r in domain_repeaters):
        kinds.add(_('Data Forwarding'))
    # TODO: Check OpenmrsImporters (when OpenmrsImporters use ConnectionSettings)
    return kinds
def _test_payload_generator(intent_case, registry_mock_cases=None, expected_updates=None,
                            expected_indices=None, expected_creates=None, expected_close=None):
    """Shared test helper: generate a DataRegistry case-update payload from
    ``intent_case`` and assert the resulting form's props, case updates,
    indices, creates, and closes against the ``expected_*`` arguments
    (each optional; only asserted when provided).
    """
    # intent case is the case created in the source domain which is used to trigger the repeater
    # and which contains the config for updating the case in the target domain
    registry_mock_cases = _mock_registry() if registry_mock_cases is None else registry_mock_cases

    repeater = DataRegistryCaseUpdateRepeater(domain=SOURCE_DOMAIN)
    generator = DataRegistryCaseUpdatePayloadGenerator(repeater)
    # Pin the submitting user so assertions below are deterministic.
    generator.submission_user_id = Mock(return_value='user1')
    generator.submission_username = Mock(return_value='user1')

    # target_case is the case in the target domain which is being updated
    def _get_case(self, case_id, *args, **kwargs):
        # Replacement for DataRegistryHelper.get_case backed by the mock
        # registry dict; unknown ids behave like a missing case.
        try:
            return registry_mock_cases[case_id]
        except KeyError:
            raise CaseNotFound

    with patch.object(DataRegistryHelper, "get_case", new=_get_case), \
            patch.object(CouchUser, "get_by_user_id", return_value=Mock(username="******")):
        repeat_record = Mock(repeater=Repeater())
        form = DataRegistryUpdateForm(generator.get_payload(repeat_record, intent_case), intent_case)
        form.assert_form_props({
            "source_domain": SOURCE_DOMAIN,
            "source_form_id": "form123",
            "source_username": "******",
        }, device_id=f"{DataRegistryCaseUpdatePayloadGenerator.DEVICE_ID}:{SOURCE_DOMAIN}")
        form.assert_case_updates(expected_updates or {})
        # The remaining assertions are opt-in: skipped unless the caller
        # supplied an expectation.
        if expected_indices:
            form.assert_case_index(expected_indices)
        if expected_creates:
            form.assert_case_create(expected_creates)
        if expected_close:
            form.assert_case_close(expected_close)
def resume_repeater(request, domain, repeater_id):
    """Resume a paused repeater, then redirect to the forwarding options page."""
    repeater = Repeater.get(repeater_id)
    repeater.resume()
    messages.success(request, "Forwarding resumed!")
    redirect_url = reverse(DomainForwardingOptionsView.urlname, args=[domain])
    return HttpResponseRedirect(redirect_url)
def initialize_repeater(self):
    """Load the Repeater document identified by the URL kwarg."""
    repeater_id = self.kwargs['repeater_id']
    return Repeater.get(repeater_id)
def initialize_repeater(self):
    # Fetch the repeater named by the 'repeater_id' URL kwarg.
    return Repeater.get(self.kwargs.get('repeater_id'))
def resume_repeater(request, domain, repeater_id):
    """Un-pause the given repeater and bounce back to the options view."""
    repeater_doc = Repeater.get(repeater_id)
    repeater_doc.resume()
    messages.success(request, "Forwarding resumed!")
    return HttpResponseRedirect(
        reverse(DomainForwardingOptionsView.urlname, args=[domain]))
def handle(self, domain, repeater_id, *args, **options):
    """Retrigger or mark-succeeded a repeater's cancelled repeat records.

    Records are optionally filtered by include/exclude regexes against their
    failure reason; the user confirms before anything is changed, and every
    outcome is logged to a timestamped CSV file.
    """
    sleep_time = options.get('sleep_time')
    include_regexps = options.get('include_regexps')
    exclude_regexps = options.get('exclude_regexps')
    verbose = options.get('verbose')
    action = options.get('action')
    success_message = options.get('success_message')
    response_status = options.get('response_status')

    repeater = Repeater.get(repeater_id)
    print("Looking up repeat records for '{}'".format(repeater.friendly_name))

    def meets_filter(record):
        # Exclude patterns: drop the record if any matches its failure reason.
        if exclude_regexps and record.failure_reason:
            if any(re.search(rx, record.failure_reason) for rx in exclude_regexps):
                return False
        # Include patterns: keep only records whose failure reason matches one.
        if include_regexps:
            if not record.failure_reason:
                return False
            return any(re.search(rx, record.failure_reason) for rx in include_regexps)
        return True  # No filter applied

    candidates = iter_repeat_records_by_domain(
        domain, repeater_id=repeater_id, state=RECORD_CANCELLED_STATE)
    records = [record for record in candidates if meets_filter(record)]

    if verbose:
        for record in records:
            print(record.payload_id, record.failure_reason)

    total_records = len(records)
    print("Found {} matching records. \n{} them?".format(total_records, action))
    if not input("(y/n)") == 'y':
        print("Aborting")
        return

    filename = "{}_{}_records-{}.csv".format(
        action, repeater.__class__.__name__,
        datetime.datetime.utcnow().strftime('%Y-%m-%d_%H.%M.%S'))
    with open(filename, 'w', encoding='utf-8') as f:
        writer = csv.writer(f)
        writer.writerow(('record_id', 'payload_id', 'state', 'message'))
        for i, record in enumerate(records):
            try:
                if action == 'retrigger':
                    if record.next_check is None:
                        record.next_check = datetime.datetime.utcnow()
                    record.fire(force_send=True)
                elif action == 'succeed':
                    self._succeed_record(record, success_message, response_status)
            except Exception as e:
                print("{}/{}: {} {}".format(i + 1, total_records, 'EXCEPTION', repr(e)))
                writer.writerow((record._id, record.payload_id, record.state, repr(e)))
            else:
                print("{}/{}: {}, {}".format(i + 1, total_records, record.state,
                                             record.attempts[-1].message))
                writer.writerow((record._id, record.payload_id, record.state,
                                 record.attempts[-1].message))
            # Optional throttle between records.
            if sleep_time:
                time.sleep(float(sleep_time))
    print("Wrote log of changes to {}".format(filename))
def handle(self, file_path, *args, **options):
    """Re-register forms/cases (ids read from ``file_path``, one per line)
    against matching repeaters.

    With --repeater_id, a single repeater is used (validated against the
    optional --repeater_type / --repeater_name filters); otherwise repeaters
    are looked up per document domain, filtered by type and name regex.
    Fetch and registration failures are logged and skipped, not fatal.
    """
    with open(file_path, 'r') as file:
        doc_ids = [line.strip() for line in file.readlines()]

    repeater_type = options['repeater_type']
    repeater_id = options['repeater_id']
    repeater_name_re = None
    if options['repeater_name']:
        repeater_name_re = re.compile(options['repeater_name'])

    if repeater_id:
        # Single-repeater mode: resolve once and validate the filters up front.
        repeater = Repeater.get(repeater_id)
        if repeater_type and repeater_type != repeater.doc_type:
            raise CommandError(f"Repeater type does not match: {repeater_type} != {repeater.doc_type}")
        if repeater_name_re and not repeater_name_re.match(repeater.name):
            raise CommandError(f"Repeater name does not match: {repeater.name}")

        def _get_repeaters(doc):
            # Every doc must belong to the chosen repeater's domain.
            assert doc.domain == repeater.domain
            return [repeater]
    else:
        # Multi-domain mode: resolve and cache matching repeaters per domain.
        by_domain = {}

        def _get_repeaters(doc):
            if doc.domain not in by_domain:
                repeater_class = get_all_repeater_types()[repeater_type] if repeater_type else None
                repeaters = get_repeaters_for_type_in_domain(doc.domain, repeater_class)
                if repeater_name_re:
                    repeaters = [r for r in repeaters if repeater_name_re.match(r.name)]
                if not repeaters:
                    logger.info(f"No repeaters matched for domain '{doc.domain}'")
                by_domain[doc.domain] = repeaters
            return by_domain[doc.domain]

    def doc_iterator(doc_ids):
        # Prefer the bulk fetch; on any failure fall back to one-by-one so a
        # single bad id doesn't sink the whole chunk.
        try:
            yield from bulk_accessor(doc_ids)
        except Exception:
            logger.exception("Unable to fetch bulk docs, falling back to individual fetches")
            for doc_id in doc_ids:
                try:
                    yield single_accessor(doc_id)
                except Exception:
                    logger.exception(f"Unable to fetch doc '{doc_id}'")

    forms = XFormInstance.objects
    cases = CommCareCase.objects
    # Pick form or case accessors based on --doc_type.
    bulk_accessor = forms.get_forms if options['doc_type'] == 'form' else cases.get_cases
    single_accessor = forms.get_form if options['doc_type'] == 'form' else cases.get_case

    for doc_ids in chunked(with_progress_bar(doc_ids), 100):
        for doc in doc_iterator(list(doc_ids)):
            try:
                repeaters = _get_repeaters(doc)
            except Exception:
                logger.exception(f"Unable to fetch repeaters for doc '{doc.get_id}'")
                continue
            for repeater in repeaters:
                try:
                    repeater.register(doc)
                except Exception:
                    logger.exception(f"Unable to create records for doc '{doc.get_id}'")
def num_repeaters(domain):
    """Return how many repeaters are configured for ``domain``."""
    domain_repeaters = Repeater.by_domain(domain)
    return len(domain_repeaters)
def num_repeaters(domain):
    # Count of repeaters configured for this domain.
    return len(Repeater.by_domain(domain))
def obj_get_list(self, bundle, domain, **kwargs):
    """Return the domain's repeaters as a list for the API list endpoint."""
    return list(Repeater.by_domain(domain))
def handle(self, domain, repeater_id, *args, **options):
    """Act on a repeater's cancelled repeat records: either retrigger them or
    mark them succeeded, subject to optional include/exclude regex filters on
    the failure reason, with a confirmation prompt and a CSV change log."""
    sleep_time = options.get('sleep_time')
    include_regexps = options.get('include_regexps')
    exclude_regexps = options.get('exclude_regexps')
    verbose = options.get('verbose')
    action = options.get('action')
    success_message = options.get('success_message')
    response_status = options.get('response_status')
    repeater = Repeater.get(repeater_id)
    print("Looking up repeat records for '{}'".format(repeater.friendly_name))

    def meets_filter(record):
        reason = record.failure_reason
        if exclude_regexps:
            # Match none of the exclude expressions
            if reason and any(re.search(pattern, reason)
                              for pattern in exclude_regexps):
                return False
        if include_regexps:
            # Match any of the include expressions
            if not reason:
                return False
            return any(re.search(pattern, reason)
                       for pattern in include_regexps)
        return True  # No filter applied

    records = list(filter(
        meets_filter,
        iter_repeat_records_by_domain(domain,
                                      repeater_id=repeater_id,
                                      state=RECORD_CANCELLED_STATE),
    ))
    if verbose:
        for record in records:
            print(record.payload_id, record.failure_reason)

    total_records = len(records)
    print("Found {} matching records. \n{} them?".format(total_records, action))
    if input("(y/n)") != 'y':
        print("Aborting")
        return

    filename = "{}_{}_records-{}.csv".format(
        action,
        repeater.__class__.__name__,
        datetime.datetime.utcnow().strftime('%Y-%m-%d_%H.%M.%S'),
    )
    with open(filename, 'w', encoding='utf-8') as f:
        writer = csv.writer(f)
        writer.writerow(('record_id', 'payload_id', 'state', 'message'))
        for i, record in enumerate(records):
            try:
                if action == 'retrigger':
                    if record.next_check is None:
                        record.next_check = datetime.datetime.utcnow()
                    record.fire(force_send=True)
                elif action == 'succeed':
                    self._succeed_record(record, success_message, response_status)
            except Exception as e:
                print("{}/{}: {} {}".format(i + 1, total_records, 'EXCEPTION', repr(e)))
                writer.writerow((record._id, record.payload_id, record.state, repr(e)))
            else:
                print("{}/{}: {}, {}".format(
                    i + 1, total_records, record.state, record.attempts[-1].message))
                writer.writerow((record._id, record.payload_id, record.state,
                                 record.attempts[-1].message))
            if sleep_time:
                # Throttle between records when requested.
                time.sleep(float(sleep_time))
    print("Wrote log of changes to {}".format(filename))
def obj_get_list(self, bundle, domain, **kwargs):
    """List endpoint: all repeaters belonging to ``domain``."""
    domain_repeaters = Repeater.by_domain(domain)
    return list(domain_repeaters)
def __str__(self):
    # Delegate directly to Repeater.__str__ so this class renders the same
    # string form as a plain Repeater (explicit call rather than super(),
    # bypassing any intermediate classes in the MRO).
    return Repeater.__str__(self)
def send_repeater_payloads(repeater_id, payload_ids, email_id):
    """Generate the repeater's payload for each payload id, write them to a
    CSV file, expose it as a zipped blob download, and email the link.

    Payload ids whose generation fails get a 'note' row describing the
    failure instead of aborting the export.
    """
    from corehq.motech.repeaters.models import Repeater, RepeatRecord

    repeater = Repeater.get(repeater_id)
    repeater_type = repeater.doc_type
    payloads = {}
    headers = ['note']
    timestamp = datetime.datetime.utcnow().strftime("%Y-%m-%d--%H-%M-%S")
    result_file_name = "bulk-payloads-%s-%s-%s.csv" % (
        repeater.doc_type, repeater.get_id, timestamp)

    def get_payload(payload_id):
        # A throwaway RepeatRecord lets the repeater build a payload without
        # persisting anything to the database.
        stub_record = RepeatRecord(
            domain=repeater.domain,
            next_check=datetime.datetime.utcnow(),
            repeater_id=repeater.get_id,
            repeater_type=repeater_type,
            payload_id=payload_id,
        )
        payload = repeater.get_payload(stub_record)
        return payload if isinstance(payload, dict) else json.loads(payload)

    def populate_payloads(headers):
        for payload_id in payload_ids:
            try:
                payload = get_payload(payload_id)
                payloads[payload_id] = payload
                # Grow the header set with any new keys from this payload.
                headers = list(set(headers + list(payload)))
            except Exception as e:
                payloads[payload_id] = {'note': 'Could not generate payload, %s' % str(e)}
        return headers

    def create_result_file():
        _, temp_file_path = tempfile.mkstemp()
        with open(temp_file_path, 'w') as csvfile:
            headers.append('payload_id')
            writer = csv.DictWriter(csvfile, fieldnames=headers)
            writer.writeheader()
            for payload_id, payload in payloads.items():
                row = payload
                row['payload_id'] = payload_id
                writer.writerow(row)
        return temp_file_path

    def email_result(download_url):
        send_HTML_email('Bulk Payload generated for %s' % repeater_type,
                        email_id,
                        'This email is to just let you know that there is a '
                        'download waiting for you at %s. It will expire in 24 hours' % download_url)

    headers = populate_payloads(headers)
    temp_file_path = create_result_file()
    download_url = expose_zipped_blob_download(
        temp_file_path,
        result_file_name,
        Format.CSV,
        repeater.domain,
    )
    email_result(download_url)
def delete_zombie_repeater_instance(document: dict):
    """Delete the Couch document of a repeater whose class no longer exists."""
    assert document['doc_type'] in DELETED_REPEATER_CLASSES
    # Do not delete old repeat records. There could be thousands, and they
    # are benign because they will not be resent.
    Repeater.get_db().delete_doc(document['_id'])