def handle(self, *args, **options):
    """Replay exception records dumped to a log file, rebuilding cases.

    Dumps the matching couchlog records to a local file, then walks it
    line by line: each line is a JSON record wrapping an ExceptionRecord
    plus (optionally) a case_id.  Records without a case_id, or for a
    case already seen this run, are archived and skipped; otherwise the
    case is rebuilt from its forms (when ``should_rebuild`` says so) and
    the exception archived.  Rebuild failures are logged and do not stop
    the run.
    """
    count = 1
    filename = 'cases_with_mismatched_forms.log'
    dump_logs_to_file(filename)
    seen_cases = set()
    cases_rebuilt = set()
    with open(filename, 'r') as file:
        for line in file:
            print('Processing record {}'.format(count))
            record = json.loads(line)
            exception = ExceptionRecord.wrap(record['exception'])
            case_id = record.get('case_id')
            if not case_id or case_id in seen_cases:
                # nothing to rebuild (or duplicate) -- just archive it
                count += 1
                archive_exception(exception)
                continue
            try:
                seen_cases.add(case_id)
                if should_rebuild(case_id):
                    cases_rebuilt.add(case_id)
                    rebuild_case_from_forms(case_id)
                    print('rebuilt case {}'.format(case_id))
                # NOTE(review): archiving at this level (rebuilt or not)
                # mirrors the no-case_id branch above -- confirm intent
                archive_exception(exception)
            # 'except Exception as e' replaces Py2-only 'except Exception, e'
            # (valid on Python 2.6+ and required on Python 3)
            except Exception as e:
                logging.exception("couldn't rebuild case {id}. {msg}".format(id=case_id, msg=str(e)))
            finally:
                count += 1
def row_to_record(row):
    """Translate a couch view row into a plain record dict.

    The wrapped document's ``domain`` field is expected to hold a
    comma-separated ``domain,case_id`` pair; when it does not, only the
    serialized exception is returned.
    """
    doc = ExceptionRecord.wrap(row["doc"])
    raw_domain = getattr(doc, "domain", "")
    try:
        domain, case_id = raw_domain.split(',')
    except ValueError:
        # domain field did not split into exactly two parts
        return {'exception': doc.to_json()}
    return {
        'domain': domain,
        'case_id': case_id,
        'exception': doc.to_json(),
    }
def get_matching_records(query, include_archived):
    """Fetch ExceptionRecords matching *query*.

    With lucene enabled, runs a full-text search (excluding archived
    records unless *include_archived* is set); otherwise falls back to
    an exact-key couch view lookup.
    """
    db = ExceptionRecord.get_db()
    if config.LUCENE_ENABLED:
        if not include_archived:
            query = "%s AND NOT archived" % query
        # probe once for the total, then pull every hit in a single page
        total = db.search(config.COUCHLOG_LUCENE_VIEW,
                          handler="_fti/_design",
                          q=query,
                          limit=1).total_rows
        hits = db.search(config.COUCHLOG_LUCENE_VIEW,
                         handler="_fti/_design",
                         q=query,
                         limit=total,
                         include_docs=True)
        return [ExceptionRecord.wrap(hit["doc"]) for hit in hits]
    view_name = "couchlog/all_by_msg" if include_archived else "couchlog/inbox_by_msg"
    return ExceptionRecord.view(view_name,
                                reduce=False,
                                key=query,
                                include_docs=True).all()
def wrapper_func(row):
    """Wrap the view row's doc as an ExceptionRecord and serialize it."""
    return _record_to_json(ExceptionRecord.wrap(row["doc"]))