def handle(self, *args, **options):
    """Delete every couchlog record whose message starts with
    "problem in form listener" (the trailing "z" endkey captures the
    whole prefix range)."""
    matches = ExceptionRecord.view(
        "couchlog/all_by_msg",
        startkey="problem in form listener",
        endkey="problem in form listenerz",
        reduce=False,
    ).all()
    for match in matches:
        ExceptionRecord.get_db().delete_doc(match["id"])
def testCreation(self):
    """Logging an error should create exactly one unarchived record."""
    def _all_records():
        return ExceptionRecord.view("couchlog/all_by_date", include_docs=True).all()

    self.assertEqual(0, len(_all_records()))
    logging.error("Fail!")
    self.assertEqual(1, len(_all_records()))
    record = ExceptionRecord.view("couchlog/all_by_date", include_docs=True).one()
    self.assertEqual("Fail!", record.message)
    self.assertTrue("tests.py" in record.pathname)
    self.assertFalse(record.archived)
def handle(self, *args, **options):
    """Purge all "problem in form listener" records from couchlog."""
    view_results = ExceptionRecord.view(
        "couchlog/all_by_msg",
        # startkey/endkey bound a prefix match on the message
        startkey="problem in form listener",
        endkey="problem in form listenerz",
        reduce=False)
    for record_row in view_results.all():
        ExceptionRecord.get_db().delete_doc(record_row["id"])
def testPostSaveDrugBug(self):
    """Regression test: importing this patient + form must not log exceptions."""
    def _error_count():
        return len(ExceptionRecord.view("couchlog/all_by_date", include_docs=True).all())

    self.assertEqual(0, _error_count())
    data_dir = os.path.join(os.path.dirname(__file__), "testdata", "test_post_save_drug_bug")
    patient = export.import_patient_json_file(os.path.join(data_dir, "patient.json"))
    updated_patient, form_doc1 = export.add_form_file_to_patient(
        patient.get_id, os.path.join(data_dir, "001_underfive.xml"))
    self.assertEqual(0, _error_count())
def testThreshold(self):
    """Debug-level messages should never reach couchlog.

    Makes the shady assumption that the couchlog threshold is above DEBUG.
    """
    def _record_count():
        return len(ExceptionRecord.view("couchlog/all_by_date", include_docs=True).all())

    self.assertEqual(0, _record_count())
    logging.debug("Don't write me to couchlog!")
    self.assertEqual(0, _record_count())
    # the couchlog handler's own threshold must apply even when the
    # root logger is fully permissive
    logging.root.setLevel(logging.DEBUG)
    logging.debug("Don't write me to couchlog either!")
    self.assertEqual(0, _record_count())
def get_matching_records(query, include_archived):
    """Return the ExceptionRecords matching ``query``.

    Uses the lucene full-text index when enabled, otherwise falls back to
    exact-key lookups on the message views. ``include_archived`` widens the
    search to archived records.
    """
    if config.LUCENE_ENABLED:
        search_query = query if include_archived else "%s AND NOT archived" % query
        db = ExceptionRecord.get_db()
        # first query just counts the hits so the second can fetch them all
        total = db.search(config.COUCHLOG_LUCENE_VIEW, handler="_fti/_design",
                          q=search_query, limit=1).total_rows
        hits = db.search(config.COUCHLOG_LUCENE_VIEW, handler="_fti/_design",
                         q=search_query, limit=total, include_docs=True)
        return [ExceptionRecord.wrap(hit["doc"]) for hit in hits]
    view_name = "couchlog/all_by_msg" if include_archived else "couchlog/inbox_by_msg"
    return ExceptionRecord.view(view_name, reduce=False, key=query,
                                include_docs=True).all()
def dashboard(request):
    """
    View all couch error data.

    GET renders the dashboard; ``show`` selects "inbox" (default) or "all".
    POST supports bulk operations (``op``) against a search ``query``:
    "bulk_archive" marks all matches archived, "bulk_delete" removes them.
    """
    show = request.GET.get("show", "inbox")
    # there's a post mechanism to do stuff here. currently all it can do is
    # bulk archive or bulk delete a search
    if request.method == "POST":
        op = request.POST.get("op", "")
        query = request.POST.get("query", "")
        if query:
            def get_matching_records(query, include_archived):
                # find records via lucene full-text search when available,
                # otherwise by exact message key against the couch views
                if config.LUCENE_ENABLED:
                    if not include_archived:
                        query = "%s AND NOT archived" % query
                    # first search only fetches the hit count; the second
                    # fetches every hit using that count as the limit
                    limit = ExceptionRecord.get_db().search(config.COUCHLOG_LUCENE_VIEW,
                                                            handler="_fti/_design",
                                                            q=query, limit=1).total_rows
                    matches = ExceptionRecord.get_db().search(config.COUCHLOG_LUCENE_VIEW,
                                                              handler="_fti/_design",
                                                              q=query, limit=limit,
                                                              include_docs=True)
                    return [ExceptionRecord.wrap(res["doc"]) for res in matches]
                else:
                    if include_archived:
                        return ExceptionRecord.view("couchlog/all_by_msg",
                                                    reduce=False,
                                                    key=query,
                                                    include_docs=True).all()
                    else:
                        return ExceptionRecord.view("couchlog/inbox_by_msg",
                                                    reduce=False,
                                                    key=query,
                                                    include_docs=True).all()
            if op == "bulk_archive":
                # archiving only ever applies to inbox records
                records = get_matching_records(query, False)
                for record in records:
                    record.archived = True
                ExceptionRecord.bulk_save(records)
                messages.success(request, "%s records successfully archived." % len(records))
            elif op == "bulk_delete":
                # deletion respects the current view: include archived
                # records unless we are looking at the inbox
                records = get_matching_records(query, show != "inbox")
                rec_json_list = [record.to_json() for record in records]
                ExceptionRecord.get_db().bulk_delete(rec_json_list)
                messages.success(request, "%s records successfully deleted." % len(records))
    # build the base url for single-record links by stripping a throwaway id
    single_url_base = reverse('couchlog_single', args=['throwaway']).replace('throwaway/', '')
    return render_to_response('couchlog/dashboard.html',
                              {"show": show,
                               "count": True,
                               "lucene_enabled": config.LUCENE_ENABLED,
                               "support_email": config.SUPPORT_EMAIL,
                               "config": config.COUCHLOG_TABLE_CONFIG,
                               "display_cols": config.COUCHLOG_DISPLAY_COLS,
                               "single_url_base": single_url_base,
                               "couchlog_config": config},
                              context_instance=RequestContext(request))
def handle(self, *args, **options):
    """
    Dump couchlog records describing cases with mismatched forms to a file,
    then walk the file: rebuild each distinct case (at most once) and archive
    the exception that reported it. Failures are logged and processing
    continues with the next record.
    """
    count = 1
    filename = 'cases_with_mismatched_forms.log'
    dump_logs_to_file(filename)
    seen_cases = set()
    cases_rebuilt = set()
    # NOTE: ``file`` shadows the builtin; kept as-is to preserve the code
    with open(filename, 'r') as file:
        while True:
            line = file.readline()
            if not line:
                # end of dump file: all records processed
                return
            print 'Processing record {}'.format(count)
            record = json.loads(line)
            exception = ExceptionRecord.wrap(record['exception'])
            case_id = record.get('case_id', None)
            if not case_id or case_id in seen_cases:
                # no case attached, or we already handled this case:
                # just archive the exception and move on
                count += 1
                archive_exception(exception)
                continue
            try:
                seen_cases.add(case_id)
                if should_rebuild(case_id):
                    cases_rebuilt.add(case_id)
                    rebuild_case_from_forms(case_id)
                    print 'rebuilt case {}'.format(case_id)
                # archive regardless of whether a rebuild was needed
                archive_exception(exception)
            except Exception, e:
                # keep going; a single bad case should not stop the run
                logging.exception("couldn't rebuild case {id}. {msg}".format(id=case_id, msg=str(e)))
            finally:
                count += 1
def get_results(key):
    """Fetch up to 1000 all_by_date rows dated up to ``key`` (docs excluded)."""
    view_kwargs = {
        "reduce": False,
        "endkey": [key.isoformat()],
        "limit": 1000,
        "include_docs": False,
    }
    return ExceptionRecord.view("couchlog/all_by_date", **view_kwargs)
def dump_logs_to_file(filename):
    """
    Page through the FULL_SEARCH couchlog search and write each relevant,
    unarchived record to ``filename`` as one JSON object per line. Stops
    when the paginator is exhausted or results stop matching SEARCH_KEY.
    """
    print 'Writing logs to file'
    paginator = SearchPaginator(ExceptionRecord.get_db(), FULL_SEARCH)
    records_written = 0
    try:
        with open(filename, 'w') as log_file:
            while True:
                page = paginator.next_page()
                if not page:
                    # paginator exhausted (or search failed): we are done
                    return
                for row in page:
                    record = row_to_record(row)
                    if record['exception']['archived']:
                        # archived exceptions are not interesting here
                        continue
                    if 'case_id' not in record and SEARCH_KEY not in record['exception']['message']:
                        # search results are no longer relevant; stop entirely
                        return
                    log_file.write('{}\n'.format(json.dumps(record)))
                    records_written += 1
                    if records_written % 100 == 0:
                        # lightweight progress indicator
                        print '{} records written to file'.format(records_written)
    finally:
        # always report the final tally, even on an early return
        print '{} records written to file'.format(records_written)
def single(request, log_id, display="full"):
    """Render a single couchlog record.

    POST accepts an ``action`` of "delete", "archive" or "move_to_inbox";
    ``display`` chooses the full page or the ajax fragment template.
    """
    log = ExceptionRecord.get(log_id)
    if request.method == "POST":
        action = request.POST.get("action", None)
        if request.user and not request.user.is_anonymous():
            username = request.user.username
        else:
            username = "unknown"
        if action == "delete":
            log.delete()
            messages.success(request, "Log was deleted!")
            return HttpResponseRedirect(reverse("couchlog_home"))
        elif action == "archive":
            log.archive(username)
            messages.success(request, "Log was archived!")
        elif action == "move_to_inbox":
            log.reopen(username)
            messages.success(request, "Log was moved!")
    if display == "ajax":
        template = "couchlog/ajax/single.html"
    elif display == "full":
        template = "couchlog/single.html"
    else:
        raise ValueError("Unknown display type: %s" % display)
    return render_to_response(template,
                              {"log": log, "couchlog_config": config},
                              context_instance=RequestContext(request))
def testFromException(self):
    """logging.exception inside an except block should be captured."""
    records = ExceptionRecord.view("couchlog/all_by_date", include_docs=True).all()
    self.assertEqual(0, len(records))

    class CouchLogTestException(Exception):
        pass

    try:
        raise CouchLogTestException("Exceptional fail!")
    except Exception:
        logging.exception("some other message")
def update(request):
    """
    Update a couch log: archive it, move it back to the inbox, or delete it.

    Expects POST params ``id`` and ``action``; returns a JSON payload the
    client-side UI uses to flip the row's text/undo action/style.

    Raises on a missing id or an unrecognized action.
    """
    record_id = request.POST["id"]
    action = request.POST["action"]
    if not record_id:
        raise Exception("no id!")
    log = ExceptionRecord.get(record_id)
    username = request.user.username if request.user and not request.user.is_anonymous() else "unknown"
    if action == "archive":
        log.archive(username)
        text = "archived! press to undo"
        next_action = "move_to_inbox"
    elif action == "move_to_inbox":
        log.reopen(username)
        text = "moved! press to undo"
        next_action = "archive"
    elif action == "delete":
        log.delete()
        text = "deleted!"
        next_action = ""
    else:
        # previously an unknown action fell through and crashed later with a
        # confusing NameError on ``text``; fail fast with a clear message
        raise ValueError("Unknown action: %s" % action)
    to_return = {"id": record_id,
                 "text": text,
                 "next_action": next_action,
                 "action": action,
                 "style_class": "archived" if log.archived else "inbox"}
    return HttpResponse(json.dumps(to_return))
def purge_old_logs():
    """
    Delete couchlog records older than 52 weeks (one batch of up to 1000).

    NOTE(review): a single invocation processes at most 1000 records;
    run repeatedly (or see the looping variant) to drain a large backlog.
    """
    # use UTC, matching the other purge implementations in this codebase,
    # so the cutoff agrees with the stored isoformat date keys
    key = datetime.utcnow() - timedelta(weeks=52)
    results = ExceptionRecord.view(
        "couchlog/all_by_date",
        reduce=False,
        startkey=[key.isoformat()],
        descending=True,
        limit=1000,
        include_docs=False)
    db = ExceptionRecord.get_db()
    docs = []
    for result in results:
        try:
            rev = db.get_rev(result['id'])
        except ResourceNotFound:
            # already deleted (e.g. by a concurrent purge); skip it instead
            # of letting the whole run crash
            continue
        docs.append({
            '_id': result['id'],
            '_rev': rev,
            '_deleted': True,
        })
    db.bulk_save(docs, use_uuids=False)
def row_to_record(row):
    """Turn a view row into a dict for processing.

    When the wrapped doc's ``domain`` field holds a "domain,case_id" pair,
    return {'domain', 'case_id', 'exception'}; otherwise just {'exception'}.
    """
    doc = ExceptionRecord.wrap(row["doc"])
    raw_domain = getattr(doc, "domain", "")
    pieces = raw_domain.split(',')
    if len(pieces) != 2:
        return {'exception': doc.to_json()}
    return {
        'domain': pieces[0],
        'case_id': pieces[1],
        'exception': doc.to_json(),
    }
def wrapper(row):
    """Load the record behind a row; split its domain into domain/case_id
    when it encodes a "domain,case_id" pair, else return the bare record."""
    doc = ExceptionRecord.get(row["id"])
    raw_domain = getattr(doc, "domain", "")
    parts = raw_domain.split(',')
    if len(parts) != 2:
        return {'exception': doc}
    return {'domain': parts[0], 'case_id': parts[1], 'exception': doc}
def lucene_search(request, search_key, show_all):
    """Serve a datatables-style ajax response for a lucene search.

    Unless ``show_all`` is set, archived records are filtered out of the
    query before it is handed to the paginator.
    """
    def _row_to_json(row):
        return _record_to_json(ExceptionRecord.get(row["id"]))

    if not show_all:
        search_key = "%s AND NOT archived" % search_key
    total_records = _couchlog_count()
    paginator = LucenePaginator(config.COUCHLOG_LUCENE_VIEW, _row_to_json,
                                database=ExceptionRecord.get_db())
    return paginator.get_ajax_response(request, search_key,
                                       extras={"iTotalRecords": total_records})
def setUp(self):
    """Reset logging state and wipe all existing couchlog records.

    dictConfig would be handier, but we still support Python 2.6, so the
    handler wiring stays imperative here rather than living in settings.
    """
    self.logger = logging.getLogger('couchlog.tests')
    self.original_log_level = logging.root.getEffectiveLevel()
    # drop previously-installed couch handlers so init_handler() starts clean
    stale_handlers = [h for h in self.logger.handlers if isinstance(h, CouchHandler)]
    for handler in stale_handlers:
        self.logger.removeHandler(handler)
    logging.root.setLevel(logging.ERROR)
    init_handler()
    self.db = ExceptionRecord.get_db()
    for row in self.db.view("couchlog/all_by_date").all():
        safe_delete(self.db, row['id'])
def purge_old_logs():
    """Delete all couchlog records older than 52 weeks, batch by batch,
    until the dated view returns no more rows."""
    cutoff = datetime.utcnow() - timedelta(weeks=52)
    db = ExceptionRecord.get_db()
    batch = get_results(cutoff)
    while batch.count():
        deletions = []
        for row in batch:
            try:
                rev = db.get_rev(row["id"])
            except ResourceNotFound:
                # vanished between the view read and the rev fetch; skip
                continue
            deletions.append({"_id": row["id"], "_rev": rev, "_deleted": True})
        db.bulk_save(deletions, use_uuids=False)
        batch = get_results(cutoff)
def paging(request):
    """
    Ajax endpoint feeding the dashboard's datatable: picks a couch view (or
    delegates to lucene) based on the search term and inbox/all toggle, and
    returns a paginated JSON response of serialized exception records.
    """
    # what to show: params may arrive via POST or GET
    query = request.POST if request.method == "POST" else request.GET
    search_key = query.get("sSearch", "")
    show_all = query.get("show", "inbox") == "all"
    if search_key:
        if config.LUCENE_ENABLED:
            return lucene_search(request, search_key, show_all)
        # no lucene: fall back to exact-message-key couch views
        view_name = "couchlog/all_by_msg" if show_all else "couchlog/inbox_by_msg"
        search = True
    else:
        view_name = "couchlog/all_by_date" if show_all else "couchlog/inbox_by_date"
        search = False

    def wrapper_func(row):
        """
        Given a row of the view, get out an exception record
        """
        error = ExceptionRecord.wrap(row["doc"])
        return _record_to_json(error)

    paginator = CouchPaginator(view_name, wrapper_func, search=search,
                               view_args={"include_docs": True},
                               database=ExceptionRecord.get_db())
    # get our previous start/end keys if necessary
    # NOTE: we don't actually do anything with these yet, but we should for
    # better pagination down the road. using the "skip" parameter is not
    # super efficient.
    startkey = query.get("startkey", None)
    if startkey:
        startkey = json.loads(startkey)
    endkey = query.get("endkey", None)
    if endkey:
        endkey = json.loads(endkey)
    total_records = _couchlog_count()
    return paginator.get_ajax_response(request,
                                       extras={"startkey": startkey,
                                               "endkey": endkey,
                                               "iTotalRecords": total_records})
def get_records_to_process(search_key, batch_size):
    """Return the first ``batch_size`` unarchived lucene hits for
    ``search_key``, each mapped to a {domain, case_id, exception} dict
    (or just {exception} when the domain field is not a pair)."""
    def _split_domain(row):
        record = ExceptionRecord.get(row["id"])
        raw_domain = getattr(record, "domain", "")
        parts = raw_domain.split(',')
        if len(parts) != 2:
            return {'exception': record}
        return {'domain': parts[0], 'case_id': parts[1], 'exception': record}

    paginator = LucenePaginator(SEARCH_VIEW_NAME, _split_domain,
                                database=ExceptionRecord.get_db())
    return paginator.get_results("%s AND NOT archived" % search_key, batch_size, 0)
def next_page(self):
    """Fetch the next page of search results, threading the bookmark
    between calls. Returns [] when the search request fails."""
    search_kwargs = dict(
        q=self.query,
        include_docs=True,
        limit=self.page_size,
    )
    if self.bookmark:
        search_kwargs['bookmark'] = self.bookmark
    result = ExceptionRecord.get_db().search(
        'couchlog/_search/search',
        handler='_design',
        **search_kwargs
    )
    try:
        result.fetch()
    except RequestFailed:
        # ignore for now: treat a failed search as an empty page
        return []
    self.bookmark = result._result_cache.get('bookmark')
    return result
def purge_old_logs():
    """Purge couchlog entries older than a year, 1000 at a time."""
    threshold = datetime.utcnow() - timedelta(weeks=52)
    db = ExceptionRecord.get_db()
    page = get_results(threshold)
    while page.count():
        to_delete = []
        for entry in page:
            try:
                revision = db.get_rev(entry['id'])
            except ResourceNotFound:
                pass  # already gone; nothing to delete
            else:
                to_delete.append({
                    '_id': entry['id'],
                    '_rev': revision,
                    '_deleted': True,
                })
        db.bulk_save(to_delete, use_uuids=False)
        page = get_results(threshold)
def email(request):
    """
    Email a couchlog record (POST ``id``) to a comma-separated list of
    recipients (POST ``to``) with optional ``notes``, replying to the
    requesting user when one is logged in. Returns a JSON success flag.
    """
    id = request.POST["id"]
    to = request.POST["to"].split(",")
    notes = request.POST["notes"]
    log = ExceptionRecord.get(id)
    if request.user and not request.user.is_anonymous():
        name = request.user.get_full_name()
        username = request.user.username
        reply_to = "%s <%s>" % (request.user.get_full_name(), request.user.email)
    else:
        # anonymous sender: fall back to the support address for replies
        name = ""
        username = "******"
        reply_to = config.SUPPORT_EMAIL
    # absolute link back to the single-record page, embedded in the body
    url = "{}{}".format(get_url_base(), reverse("couchlog_single", args=[id]))
    email_body = render_to_string("couchlog/email.txt",
                                  {"user_info": "%s (%s)" % (name, username),
                                   "notes": notes,
                                   "exception_url": url})
    try:
        # subject is the first 10 words of the logged message
        email = EmailMessage("[COUCHLOG ERROR] %s" % Truncator(log.message).words(10),
                             email_body,
                             "%s <%s>" % (name, config.SUPPORT_EMAIL),
                             to,
                             headers = {'Reply-To': reply_to})
        email.send(fail_silently=False)
        return HttpResponse(json.dumps({"id": id, "success": True}))
    except Exception, e:
        # report the failure to the caller instead of raising a 500
        logging.exception("problem sending couchlog mail")
        return HttpResponse(json.dumps({"id": id, "success": False, "message": str(e)}))
def wrapper(row):
    """Fetch the ExceptionRecord behind a search row and serialize it."""
    record = ExceptionRecord.get(row["id"])
    return _record_to_json(record)
def setUp(self):
    """Start each test with an empty couchlog database."""
    database = ExceptionRecord.get_db()
    for entry in database.view("couchlog/all_by_date").all():
        safe_delete(database, entry['id'])
def testSettingsInfo(self):
    """Exercise logging.exception from inside an except block."""
    existing = ExceptionRecord.view("couchlog/all_by_date", include_docs=True).all()
    self.assertEqual(0, len(existing))
    try:
        raise Exception("Fail!")
    except Exception:
        logging.exception("This is another message")
def setUp(self):
    """Delete every existing couchlog record before the test runs."""
    records = ExceptionRecord.view("couchlog/all_by_date", include_docs=True).all()
    for record in records:
        record.delete()
def log_request_exception(sender, request, **kwargs):
    """Signal handler: persist a request exception as an ExceptionRecord."""
    # imported locally to avoid import-time cycles with couchlog.models
    from couchlog.models import ExceptionRecord
    ExceptionRecord.from_request_exception(request).save()
def log_standard_exception(sender, exc_info, **kwargs):
    """Signal handler: persist an exc_info triple as an ExceptionRecord."""
    # imported locally to avoid import-time cycles with couchlog.models
    from couchlog.models import ExceptionRecord
    ExceptionRecord.from_exc_info(exc_info).save()
def _couchlog_count():
    """Total number of couchlog records per the count view (0 when empty)."""
    row = ExceptionRecord.get_db().view("couchlog/count").one()
    if row:
        return row["value"]
    return 0
def wrapper_func(row):
    """Given a view row, wrap its doc and serialize the exception record."""
    return _record_to_json(ExceptionRecord.wrap(row["doc"]))