Example #1
 def obj_update(self, bundle, **kwargs):
     bundle.obj = Repeater.get(kwargs['pk'])
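     # Guard against the update moving this repeater to a different domain, both before and after applying changes.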
     assert bundle.obj.domain == kwargs['domain']
     bundle = self._update(bundle)
     assert bundle.obj.domain == kwargs['domain']
     bundle.obj.save()
     return bundle
Example #2
 def obj_update(self, bundle, **kwargs):
     bundle.obj = Repeater.get(kwargs['pk'])
     assert bundle.obj.domain == kwargs['domain']
     bundle = self._update(bundle)
     assert bundle.obj.domain == kwargs['domain']
     bundle.obj.save()
     return bundle
Example #3
def send_repeater_payloads(repeater_id, payload_ids, email_id):
    from corehq.motech.repeaters.models import Repeater, RepeatRecord
    repeater = Repeater.get(repeater_id)
    repeater_type = repeater.doc_type
    payloads = dict()
    headers = ['note']
    result_file_name = "bulk-payloads-%s-%s-%s.csv" % (
        repeater.doc_type, repeater.get_id,
        datetime.datetime.utcnow().strftime("%Y-%m-%d--%H-%M-%S"))

    def get_payload(payload_id):
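        # Build a throwaway RepeatRecord (never saved) just so the repeater can render the payload for this payload_id.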
        dummy_repeat_record = RepeatRecord(
            domain=repeater.domain,
            next_check=datetime.datetime.utcnow(),
            repeater_id=repeater.get_id,
            repeater_type=repeater_type,
            payload_id=payload_id,
        )
        payload = repeater.get_payload(dummy_repeat_record)
        if isinstance(payload, dict):
            return payload
        else:
            return json.loads(payload)

    def populate_payloads(headers):
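        # Render each payload and grow the CSV header set with any new keys; failures become a 'note' row.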
        for payload_id in payload_ids:
            try:
                payload = get_payload(payload_id)
                payloads[payload_id] = payload
                headers = list(set(headers + list(payload)))
            except Exception as e:
                payloads[payload_id] = {
                    'note': 'Could not generate payload, %s' % str(e)
                }
        return headers

    def create_result_file():
        _, temp_file_path = tempfile.mkstemp()
        with open(temp_file_path, 'w') as csvfile:
            headers.append('payload_id')
            writer = csv.DictWriter(csvfile, fieldnames=headers)
            writer.writeheader()
            for payload_id, payload in payloads.items():
                row = payload
                row['payload_id'] = payload_id
                writer.writerow(row)
        return temp_file_path

    def email_result(download_url):
        send_HTML_email(
            'Bulk Payload generated for %s' % repeater_type, email_id,
            'This email is to just let you know that there is a '
            'download waiting for you at %s. It will expire in 24 hours' %
            download_url)

    headers = populate_payloads(headers)
    temp_file_path = create_result_file()
    download_url = ExposeBlobDownload().get_link(temp_file_path,
                                                 result_file_name, Format.CSV)
    email_result(download_url)
Example #4
    def create_repeat_records_on_dest_repeater(self, source_repeater_id,
                                               dest_repeater_id, state):
        dest_repeater = Repeater.get(dest_repeater_id)
        retriggered = set()

        records = iter_repeat_records_by_domain(domain,
                                                repeater_id=source_repeater_id,
                                                state=state)
        record_count = get_repeat_record_count(domain,
                                               repeater_id=source_repeater_id,
                                               state=state)
        accessor = CaseAccessors(domain)
        print("Iterating over records and adding new record for them")
        for record in with_progress_bar(records, length=record_count):
            if record.payload_id in retriggered:
                self.record_failure(record.get_id,
                                    record.payload_id,
                                    error_message="Already triggered")
                continue
            try:
                episode = accessor.get_case(record.payload_id)
                episode_case_properties = episode.dynamic_case_properties()
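                # Only re-trigger confirmed TB episodes that have no Nikshay ID and are not yet registered with Nikshay (public or private).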
                if (episode_case_properties.get('nikshay_registered', 'false')
                        == 'false' and episode_case_properties.get(
                            'private_nikshay_registered', 'false') == 'false'
                        and not episode_case_properties.get('nikshay_id')
                        and episode_case_properties.get('episode_type')
                        == 'confirmed_tb'
                        and is_valid_episode_submission(episode)):
                    new_record = RepeatRecord(
                        domain=domain,
                        next_check=datetime.utcnow(),
                        repeater_id=dest_repeater_id,
                        repeater_type=dest_repeater.doc_type,
                        payload_id=record.payload_id,
                    )
                    if not self.dry_run:
                        new_record.save()
                    retriggered.add(record.payload_id)
                    self.add_row(
                        record,
                        episode_case_properties.get('migration_created_case'),
                        new_record.get_id)
                else:
                    self.record_failure(record.get_id,
                                        record.payload_id,
                                        error_message="Not to be re-triggered")
            except Exception as e:
                self.record_failure(record.get_id,
                                    record.payload_id,
                                    error_message="{error}: {message}".format(
                                        error=e.__class__.__name__, message=str(e)))
Example #5
    def handle(self, domain, repeater_id, *args, **options):
        self.domain = domain
        self.repeater_id = repeater_id
        repeater = Repeater.get(repeater_id)
        print("Looking up repeat records for '{}'".format(
            repeater.friendly_name))

        redundant_records = []
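        # Group the remaining cancelled records by payload id so that duplicates can be resolved together.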
        records_by_payload_id = defaultdict(list)
        records = iter_repeat_records_by_domain(domain,
                                                repeater_id=repeater_id,
                                                state=RECORD_CANCELLED_STATE)
        total_records = 0
        for record in records:
            total_records += 1
            most_recent_success = self.most_recent_success.get(
                record.payload_id)
            if most_recent_success and record.last_checked < most_recent_success:
                # another record with this payload has succeeded after this record failed
                redundant_records.append(record)
            else:
                records_by_payload_id[record.payload_id].append(record)

        unique_payloads = len(records_by_payload_id)
        redundant_payloads = len(redundant_records)
        print(
            "There are {total} total cancelled records, {redundant} with payloads which "
            "have since succeeded, and {unique} unsent unique payload ids.".
            format(total=total_records,
                   redundant=redundant_payloads,
                   unique=unique_payloads))
        print("Delete {} duplicate records?".format(total_records -
                                                    unique_payloads))
        if not input("(y/n)") == 'y':
            print("Aborting")
            return

        redundant_log = self.delete_already_successful_records(
            redundant_records)
        duplicates_log = self.resolve_duplicates(records_by_payload_id)

        filename = "cancelled_{}_records-{}.csv".format(
            repeater.__class__.__name__,
            datetime.datetime.utcnow().isoformat())
        print("Writing log of changes to {}".format(filename))
        with open(filename, 'w') as f:
            writer = csv.writer(f)
            writer.writerow(('RepeatRecord ID', 'Payload ID', 'Failure Reason',
                             'Deleted?', 'Reason'))
            writer.writerows(redundant_log)
            writer.writerows(duplicates_log)
Example #6
 def add_repeater_form(self):
     if self.request.method == 'POST':
         return self.repeater_form_class(self.request.POST,
                                         domain=self.domain,
                                         repeater_class=self.repeater_class)
     else:
         repeater_id = self.kwargs['repeater_id']
         repeater = Repeater.get(repeater_id)
         return self.repeater_form_class(
             domain=self.domain,
             repeater_class=self.repeater_class,
             data=repeater.to_json(),
             submit_btn_text=_("Update Repeater"),
         )
Example #7
 def add_repeater_form(self):
     if self.request.method == 'POST':
         return self.repeater_form_class(self.request.POST,
                                         domain=self.domain,
                                         repeater_class=self.repeater_class)
     else:
         repeater_id = self.kwargs['repeater_id']
         repeater = Repeater.get(repeater_id)
         data = repeater.to_json()
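         # Never echo the stored password back into the form; show a placeholder instead.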
         data['password'] = PASSWORD_PLACEHOLDER
         return self.repeater_form_class(
             domain=self.domain,
             repeater_class=self.repeater_class,
             data=data,
             submit_btn_text=_("Update Repeater"),
         )
Example #8
 def add_repeater_form(self):
     if self.request.method == 'POST':
         return self.repeater_form_class(
             self.request.POST,
             domain=self.domain,
             repeater_class=self.repeater_class
         )
     else:
         repeater_id = self.kwargs['repeater_id']
         repeater = Repeater.get(repeater_id)
         data = repeater.to_json()
         data['password'] = PASSWORD_PLACEHOLDER
         return self.repeater_form_class(
             domain=self.domain,
             repeater_class=self.repeater_class,
             data=data,
             submit_btn_text=_("Update Repeater"),
         )
Example #9
    def handle(self, domain, repeater_id, *args, **options):
        self.domain = domain
        self.repeater_id = repeater_id
        repeater = Repeater.get(repeater_id)
        print("Looking up repeat records for '{}'".format(repeater.friendly_name))

        redundant_records = []
        records_by_payload_id = defaultdict(list)
        records = iter_repeat_records_by_domain(domain, repeater_id=repeater_id, state=RECORD_CANCELLED_STATE)
        total_records = 0
        for record in records:
            total_records += 1
            most_recent_success = self.most_recent_success.get(record.payload_id)
            if most_recent_success and record.last_checked < most_recent_success:
                # another record with this payload has succeeded after this record failed
                redundant_records.append(record)
            else:
                records_by_payload_id[record.payload_id].append(record)

        unique_payloads = len(records_by_payload_id)
        redundant_payloads = len(redundant_records)
        print ("There are {total} total cancelled records, {redundant} with payloads which "
               "have since succeeded, and {unique} unsent unique payload ids."
               .format(total=total_records,
                       redundant=redundant_payloads,
                       unique=unique_payloads))
        print("Delete {} duplicate records?".format(total_records - unique_payloads))
        if not input("(y/n)") == 'y':
            print("Aborting")
            return

        redundant_log = self.delete_already_successful_records(redundant_records)
        duplicates_log = self.resolve_duplicates(records_by_payload_id)

        filename = "cancelled_{}_records-{}.csv".format(
            repeater.__class__.__name__,
            datetime.datetime.utcnow().isoformat())
        print("Writing log of changes to {}".format(filename))
        with open(filename, 'w', encoding='utf-8') as f:
            writer = csv.writer(f)
            writer.writerow(('RepeatRecord ID', 'Payload ID', 'Failure Reason', 'Deleted?', 'Reason'))
            writer.writerows(redundant_log)
            writer.writerows(duplicates_log)
Example #10
def resume_repeater(request, domain, repeater_id):
    rep = Repeater.get(repeater_id)
    rep.resume()
    messages.success(request, "Forwarding resumed!")
    return HttpResponseRedirect(
        reverse(DomainForwardingOptionsView.urlname, args=[domain]))
Example #11
 def initialize_repeater(self):
     return Repeater.get(self.kwargs['repeater_id'])
Example #12
 def initialize_repeater(self):
     return Repeater.get(self.kwargs['repeater_id'])
Example #13
def resume_repeater(request, domain, repeater_id):
    rep = Repeater.get(repeater_id)
    rep.resume()
    messages.success(request, "Forwarding resumed!")
    return HttpResponseRedirect(reverse(DomainForwardingOptionsView.urlname, args=[domain]))
Example #14
    def handle(self, domain, repeater_id, *args, **options):
        sleep_time = options.get('sleep_time')
        include_regexps = options.get('include_regexps')
        exclude_regexps = options.get('exclude_regexps')
        verbose = options.get('verbose')
        action = options.get('action')
        success_message = options.get('success_message')
        response_status = options.get('response_status')

        repeater = Repeater.get(repeater_id)
        print("Looking up repeat records for '{}'".format(
            repeater.friendly_name))

        def meets_filter(record):
            if exclude_regexps:  # Match none of the exclude expressions
                if record.failure_reason:
                    if any(
                            re.search(exclude_regex, record.failure_reason)
                            for exclude_regex in exclude_regexps):
                        return False

            if include_regexps:  # Match any of the include expressions
                if not record.failure_reason:
                    return False
                return any(
                    re.search(include_regex, record.failure_reason)
                    for include_regex in include_regexps)
            return True  # No filter applied

        records = list(
            filter(
                meets_filter,
                iter_repeat_records_by_domain(domain,
                                              repeater_id=repeater_id,
                                              state=RECORD_CANCELLED_STATE)))

        if verbose:
            for record in records:
                print(record.payload_id, record.failure_reason)

        total_records = len(records)
        print("Found {} matching records.  {} them?".format(
            total_records, action))
        if not input("(y/n)") == 'y':
            print("Aborting")
            return

        filename = "{}_{}_records-{}.csv".format(
            action, repeater.__class__.__name__,
            datetime.datetime.utcnow().strftime('%Y-%m-%d_%H.%M.%S'))
        with open(filename, 'w', encoding='utf-8') as f:
            writer = csv.writer(f)
            writer.writerow(('record_id', 'payload_id', 'state', 'message'))

            for i, record in enumerate(records):
                try:
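                    # 'retrigger' re-fires the cancelled record immediately; 'succeed' marks it as succeeded with the given message and response status.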
                    if action == 'retrigger':
                        if record.next_check is None:
                            record.next_check = datetime.datetime.utcnow()
                        record.fire(force_send=True)
                    elif action == 'succeed':
                        self._succeed_record(record, success_message,
                                             response_status)
                except Exception as e:
                    print("{}/{}: {} {}".format(i + 1, total_records,
                                                'EXCEPTION', repr(e)))
                    writer.writerow(
                        (record._id, record.payload_id, record.state, repr(e)))
                else:
                    print("{}/{}: {}, {}".format(i + 1, total_records,
                                                 record.state,
                                                 record.attempts[-1].message))
                    writer.writerow(
                        (record._id, record.payload_id, record.state,
                         record.attempts[-1].message))
                if sleep_time:
                    time.sleep(float(sleep_time))

        print("Wrote log of changes to {}".format(filename))
Example #15
    def handle(self, file_path, *args, **options):
        with open(file_path, 'r') as file:
            doc_ids = [line.strip() for line in file.readlines()]

        repeater_type = options['repeater_type']
        repeater_id = options['repeater_id']
        repeater_name_re = None
        if options['repeater_name']:
            repeater_name_re = re.compile(options['repeater_name'])

        if repeater_id:
            repeater = Repeater.get(repeater_id)
            if repeater_type and repeater_type != repeater.doc_type:
                raise CommandError(f"Repeater type does not match: {repeater_type} != {repeater.doc_type}")
            if repeater_name_re and not repeater_name_re.match(repeater.name):
                raise CommandError(f"Repeater name does not match: {repeater.name}")

            def _get_repeaters(doc):
                assert doc.domain == repeater.domain
                return [repeater]
        else:
            by_domain = {}
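            # Cache the matching repeaters per domain so they are only looked up once.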

            def _get_repeaters(doc):
                if doc.domain not in by_domain:
                    repeater_class = get_all_repeater_types()[repeater_type] if repeater_type else None
                    repeaters = get_repeaters_for_type_in_domain(doc.domain, repeater_class)
                    if repeater_name_re:
                        repeaters = [r for r in repeaters if repeater_name_re.match(r.name)]

                    if not repeaters:
                        logger.info(f"No repeaters matched for domain '{doc.domain}'")
                    by_domain[doc.domain] = repeaters
                return by_domain[doc.domain]

        def doc_iterator(doc_ids):
            try:
                yield from bulk_accessor(doc_ids)
            except Exception:
                logger.exception("Unable to fetch bulk docs, falling back to individual fetches")
                for doc_id in doc_ids:
                    try:
                        yield single_accessor(doc_id)
                    except Exception:
                        logger.exception(f"Unable to fetch doc '{doc_id}'")

        forms = XFormInstance.objects
        cases = CommCareCase.objects
        bulk_accessor = forms.get_forms if options['doc_type'] == 'form' else cases.get_cases
        single_accessor = forms.get_form if options['doc_type'] == 'form' else cases.get_case
        for doc_ids in chunked(with_progress_bar(doc_ids), 100):
            for doc in doc_iterator(list(doc_ids)):
                try:
                    repeaters = _get_repeaters(doc)
                except Exception:
                    logger.exception(f"Unable to fetch repeaters for doc '{doc.get_id}'")
                    continue

                for repeater in repeaters:
                    try:
                        repeater.register(doc)
                    except Exception:
                        logger.exception(f"Unable to create records for doc '{doc.get_id}'")
Example #16
    def handle(self, domain, repeater_id, *args, **options):
        sleep_time = options.get('sleep_time')
        include_regexps = options.get('include_regexps')
        exclude_regexps = options.get('exclude_regexps')
        verbose = options.get('verbose')
        action = options.get('action')
        success_message = options.get('success_message')
        response_status = options.get('response_status')

        repeater = Repeater.get(repeater_id)
        print("Looking up repeat records for '{}'".format(repeater.friendly_name))

        def meets_filter(record):
            if exclude_regexps:  # Match none of the exclude expressions
                if record.failure_reason:
                    if any(re.search(exclude_regex, record.failure_reason)
                           for exclude_regex in exclude_regexps):
                        return False

            if include_regexps:  # Match any of the include expressions
                if not record.failure_reason:
                    return False
                return any(re.search(include_regex, record.failure_reason)
                           for include_regex in include_regexps)
            return True  # No filter applied

        records = list(filter(
            meets_filter,
            iter_repeat_records_by_domain(domain, repeater_id=repeater_id, state=RECORD_CANCELLED_STATE)
        ))

        if verbose:
            for record in records:
                print(record.payload_id, record.failure_reason)

        total_records = len(records)
        print("Found {} matching records.  {} them?".format(total_records, action))
        if not input("(y/n)") == 'y':
            print("Aborting")
            return

        filename = "{}_{}_records-{}.csv".format(
            action,
            repeater.__class__.__name__,
            datetime.datetime.utcnow().strftime('%Y-%m-%d_%H.%M.%S'))
        with open(filename, 'w', encoding='utf-8') as f:
            writer = csv.writer(f)
            writer.writerow(('record_id', 'payload_id', 'state', 'message'))

            for i, record in enumerate(records):
                try:
                    if action == 'retrigger':
                        if record.next_check is None:
                            record.next_check = datetime.datetime.utcnow()
                        record.fire(force_send=True)
                    elif action == 'succeed':
                        self._succeed_record(record, success_message, response_status)
                except Exception as e:
                    print("{}/{}: {} {}".format(i + 1, total_records, 'EXCEPTION', repr(e)))
                    writer.writerow((record._id, record.payload_id, record.state, repr(e)))
                else:
                    print("{}/{}: {}, {}".format(i + 1, total_records, record.state, record.attempts[-1].message))
                    writer.writerow((record._id, record.payload_id, record.state, record.attempts[-1].message))
                if sleep_time:
                    time.sleep(float(sleep_time))

        print("Wrote log of changes to {}".format(filename))
Example #17
def send_repeater_payloads(repeater_id, payload_ids, email_id):
    from corehq.motech.repeaters.models import Repeater, RepeatRecord
    repeater = Repeater.get(repeater_id)
    repeater_type = repeater.doc_type
    payloads = dict()
    headers = ['note']
    result_file_name = "bulk-payloads-%s-%s-%s.csv" % (
        repeater.doc_type, repeater.get_id,
        datetime.datetime.utcnow().strftime("%Y-%m-%d--%H-%M-%S")
    )

    def get_payload(payload_id):
        dummy_repeat_record = RepeatRecord(
            domain=repeater.domain,
            next_check=datetime.datetime.utcnow(),
            repeater_id=repeater.get_id,
            repeater_type=repeater_type,
            payload_id=payload_id,
        )
        payload = repeater.get_payload(dummy_repeat_record)
        if isinstance(payload, dict):
            return payload
        else:
            return json.loads(payload)

    def populate_payloads(headers):
        for payload_id in payload_ids:
            try:
                payload = get_payload(payload_id)
                payloads[payload_id] = payload
                headers = list(set(headers + list(payload)))
            except Exception as e:
                payloads[payload_id] = {'note': 'Could not generate payload, %s' % str(e)}
        return headers

    def create_result_file():
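        # Write one CSV row per payload, with a column for every key seen across all payloads plus the payload_id.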
        _, temp_file_path = tempfile.mkstemp()
        with open(temp_file_path, 'w') as csvfile:
            headers.append('payload_id')
            writer = csv.DictWriter(csvfile, fieldnames=headers)
            writer.writeheader()
            for payload_id, payload in payloads.items():
                row = payload
                row['payload_id'] = payload_id
                writer.writerow(row)
        return temp_file_path

    def email_result(download_url):
        send_HTML_email('Bulk Payload generated for %s' % repeater_type,
                        email_id,
                        'This email is to just let you know that there is a '
                        'download waiting for you at %s. It will expire in 24 hours' % download_url)

    headers = populate_payloads(headers)
    temp_file_path = create_result_file()
    download_url = expose_zipped_blob_download(
        temp_file_path,
        result_file_name,
        Format.CSV,
        repeater.domain,
    )
    email_result(download_url)