Example #1
def handle(self, *args, **options):
    all_matching_records = ExceptionRecord.view("couchlog/all_by_msg",
                                                startkey="problem in form listener",
                                                endkey="problem in form listenerz",
                                                reduce=False).all()
    for row in all_matching_records:
        ExceptionRecord.get_db().delete_doc(row["id"])
Example #2
def handle(self, *args, **options):
    all_matching_records = ExceptionRecord.view(
        "couchlog/all_by_msg",
        startkey="problem in form listener",
        endkey="problem in form listenerz",
        reduce=False).all()
    for row in all_matching_records:
        ExceptionRecord.get_db().delete_doc(row["id"])
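
Examples #1 and #2 lean on the CouchDB range-query trick of appending a trailing character to the message to build an endkey ("problem in form listenerz" catches everything that starts with "problem in form listener"). A minimal sketch of the same idea as a reusable helper; the prefix_range name and the u"\ufff0" sentinel are my own choices rather than couchlog API:

def prefix_range(prefix):
    """Build startkey/endkey view parameters matching every string key that
    begins with `prefix`. A high Unicode sentinel is used instead of the
    literal "z" above, so keys like "...listenerzz" are still included.
    (Illustrative helper, not part of couchlog.)"""
    return {"startkey": prefix, "endkey": prefix + u"\ufff0"}


params = prefix_range("problem in form listener")
# These parameters would then be splatted into the view call, e.g. (not run here):
# ExceptionRecord.view("couchlog/all_by_msg", reduce=False, **params).all()
print(params["endkey"].startswith(params["startkey"]))  # True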
Example #3
def get_matching_records(query, include_archived):
    if config.LUCENE_ENABLED:
        if not include_archived:
            query = "%s AND NOT archived" % query
        limit = ExceptionRecord.get_db().search(config.COUCHLOG_LUCENE_VIEW, handler="_fti/_design",
                                                q=query, limit=1).total_rows
        matches = ExceptionRecord.get_db().search(config.COUCHLOG_LUCENE_VIEW, handler="_fti/_design",
                                                  q=query, limit=limit, include_docs=True)
        return [ExceptionRecord.wrap(res["doc"]) for res in matches]
    else:
        if include_archived:
            return ExceptionRecord.view("couchlog/all_by_msg", reduce=False, key=query, include_docs=True).all()
        else:
            return ExceptionRecord.view("couchlog/inbox_by_msg", reduce=False, key=query, include_docs=True).all()
Example #4
def dashboard(request):
    """
    View all couch error data
    """
    show = request.GET.get("show", "inbox")
    # there's a POST mechanism to do bulk operations here: currently it can
    # bulk archive or bulk delete the records matching a search
    if request.method == "POST":
        op = request.POST.get("op", "")
        query = request.POST.get("query", "")
        if query:
            def get_matching_records(query, include_archived):
                if config.LUCENE_ENABLED:
                    if not include_archived:
                        query = "%s AND NOT archived" % query
                    limit = ExceptionRecord.get_db().search(config.COUCHLOG_LUCENE_VIEW, handler="_fti/_design",
                                            q=query, limit=1).total_rows
                    matches = ExceptionRecord.get_db().search(config.COUCHLOG_LUCENE_VIEW, handler="_fti/_design",
                                              q=query, limit=limit, include_docs=True)
                    return [ExceptionRecord.wrap(res["doc"]) for res in matches]
                    
                else:
                    if include_archived:
                        return ExceptionRecord.view("couchlog/all_by_msg", reduce=False, key=query, include_docs=True).all() 
                    else:
                        return ExceptionRecord.view("couchlog/inbox_by_msg", reduce=False, key=query, include_docs=True).all() 
            if op == "bulk_archive":
                records = get_matching_records(query, False)
                for record in records:
                    record.archived = True
                ExceptionRecord.bulk_save(records)    
                messages.success(request, "%s records successfully archived." % len(records))
            elif op == "bulk_delete":
                records = get_matching_records(query, show != "inbox")
                rec_json_list = [record.to_json() for record in records]
                ExceptionRecord.get_db().bulk_delete(rec_json_list)
                messages.success(request, "%s records successfully deleted." % len(records))
    
    single_url_base = reverse('couchlog_single', args=['throwaway']).replace('throwaway/', '')
    return render_to_response('couchlog/dashboard.html',
                              {"show" : show, "count": True,
                               "lucene_enabled": config.LUCENE_ENABLED,
                               "support_email": config.SUPPORT_EMAIL,
                               "config": config.COUCHLOG_TABLE_CONFIG,
                               "display_cols": config.COUCHLOG_DISPLAY_COLS,
                               "single_url_base": single_url_base,
                               "couchlog_config": config},
                               context_instance=RequestContext(request))
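
The single_url_base line in Example #4 builds the prefix of the couchlog_single URL by reversing it with a placeholder argument and then stripping that placeholder back off. The same trick in isolation, with a stand-in for Django's reverse (the URL pattern below is invented for illustration):

def fake_reverse(name, args):
    # Stand-in for django.core.urlresolvers.reverse; the pattern is made up.
    patterns = {"couchlog_single": "/couchlog/single/%s/"}
    return patterns[name] % args[0]


single_url_base = fake_reverse("couchlog_single", args=["throwaway"]).replace("throwaway/", "")
print(single_url_base)  # /couchlog/single/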
Example #5
def dump_logs_to_file(filename):
    print 'Writing logs to file'
    paginator = SearchPaginator(ExceptionRecord.get_db(), FULL_SEARCH)

    records_written = 0
    try:
        with open(filename, 'w') as log_file:
            while True:
                page = paginator.next_page()
                if not page:
                    return

                for row in page:
                    record = row_to_record(row)
                    if record['exception']['archived']:
                        continue
                    if 'case_id' not in record and SEARCH_KEY not in record['exception']['message']:
                        # search results are no longer relevant
                        return

                    log_file.write('{}\n'.format(json.dumps(record)))
                    records_written += 1
                    if records_written % 100 == 0:
                        print '{} records written to file'.format(records_written)
    finally:
        print '{} records written to file'.format(records_written)
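
Example #5 writes one JSON document per line. A minimal sketch for reading such a dump back, assuming only the record layout visible above (an "exception" dict with an "archived" flag, plus an optional "case_id"); the helper name and the filtering are illustrative, not part of couchlog:

import json


def iter_dumped_records(lines, include_archived=False):
    """Yield record dicts from an iterable of JSON lines, e.g. the file written
    by dump_logs_to_file. Blank lines are skipped and archived records are
    filtered out unless requested. (Illustrative helper, not couchlog API.)"""
    for line in lines:
        line = line.strip()
        if not line:
            continue
        record = json.loads(line)
        if not include_archived and record.get("exception", {}).get("archived"):
            continue
        yield record


sample = ['{"case_id": "abc123", "exception": {"archived": false, "message": "problem in form listener"}}']
for rec in iter_dumped_records(sample):
    print(rec["case_id"])  # abc123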
Example #6
def lucene_search(request, search_key, show_all):
    
    def wrapper(row):
        id = row["id"]
        doc = ExceptionRecord.get(id)
        return _record_to_json(doc)
    
    if not show_all:
        search_key = "%s AND NOT archived" % search_key
    
    total_records = _couchlog_count()
    paginator = LucenePaginator(config.COUCHLOG_LUCENE_VIEW, wrapper, 
                                database=ExceptionRecord.get_db())
    return paginator.get_ajax_response(request, search_key, extras={"iTotalRecords": total_records})
Example #7
    def setUp(self):
        # We want to support Python 2.6 a bit longer, so we cannot use dictConfig here...
        # but it is so handy that we would otherwise put it in settings.py instead of
        # wrestling with crappy imperative config.
        self.logger = logging.getLogger('couchlog.tests')
        self.original_log_level = logging.root.getEffectiveLevel()
        for handler in list(self.logger.handlers):
            if isinstance(handler, CouchHandler):
                self.logger.removeHandler(handler)
        logging.root.setLevel(logging.ERROR)
        init_handler()

        self.db = ExceptionRecord.get_db()
        for row in self.db.view("couchlog/all_by_date").all():
            safe_delete(self.db, row['id'])
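
setUp stores the original root log level, but the matching tearDown is not part of this example. A plausible counterpart, inferred only from the attributes set above (a guess at the pairing, not the actual couchlog test code):

    def tearDown(self):
        # Restore the root logger level captured in setUp and clear out any
        # records the test created, mirroring the cleanup loop above.
        # (Hypothetical counterpart; the real tearDown is not shown here.)
        logging.root.setLevel(self.original_log_level)
        for row in self.db.view("couchlog/all_by_date").all():
            safe_delete(self.db, row['id'])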
Example #8
def purge_old_logs():
    key = datetime.utcnow() - timedelta(weeks=52)

    db = ExceptionRecord.get_db()

    results = get_results(key)
    while results.count():
        docs = []
        for result in results:
            try:
                rev = db.get_rev(result["id"])
            except ResourceNotFound:
                pass
            else:
                docs.append({"_id": result["id"], "_rev": rev, "_deleted": True})

        db.bulk_save(docs, use_uuids=False)
        results = get_results(key)
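
Example #8 (like the later variants in Examples #11 and #13) deletes in bulk by saving stub documents with _deleted set to True instead of issuing one delete per record. A sketch of that stub-building step in isolation; deletion_stubs and the dict-backed get_rev are illustrative stand-ins for db.get_rev, and the only assumption about the rows is the "id" key used above:

def deletion_stubs(rows, get_rev):
    """Turn view rows into CouchDB bulk-delete stubs ({_id, _rev, _deleted}),
    skipping rows whose current revision cannot be fetched. Here get_rev may
    raise KeyError where db.get_rev raises ResourceNotFound.
    (Illustrative helper, not couchlog API.)"""
    docs = []
    for row in rows:
        try:
            rev = get_rev(row["id"])
        except KeyError:
            continue  # document already gone, nothing to delete
        docs.append({"_id": row["id"], "_rev": rev, "_deleted": True})
    return docs


revs = {"doc-1": "1-abc", "doc-3": "4-def"}
rows = [{"id": "doc-1"}, {"id": "doc-2"}, {"id": "doc-3"}]
print(deletion_stubs(rows, revs.__getitem__))
# Stubs for doc-1 and doc-3 only; these would go to db.bulk_save(docs, use_uuids=False).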
Example #9
def paging(request):
    
    # what to show
    query = request.POST if request.method == "POST" else request.GET
    
    search_key = query.get("sSearch", "")
    show_all = query.get("show", "inbox") == "all"
    if search_key:
        if config.LUCENE_ENABLED:
            return lucene_search(request, search_key, show_all)
        view_name = "couchlog/all_by_msg" if show_all else "couchlog/inbox_by_msg"
        search = True
    else:
        view_name = "couchlog/all_by_date" if show_all else "couchlog/inbox_by_date"
        search = False
    
    def wrapper_func(row):
        """
        Given a row of the view, get out an exception record
        """
        error = ExceptionRecord.wrap(row["doc"])
        return _record_to_json(error)
        
    paginator = CouchPaginator(view_name, wrapper_func, search=search, 
                               view_args={"include_docs": True},
                               database=ExceptionRecord.get_db())
    
    # get our previous start/end keys if necessary
    # NOTE: we don't actually do anything with these yet, but we should for 
    # better pagination down the road.  using the "skip" parameter is not
    # super efficient.
    startkey = query.get("startkey", None)
    if startkey:
        startkey = json.loads(startkey)
    endkey = query.get("endkey", None)
    if endkey:
        endkey = json.loads(endkey)
    
    
    total_records = _couchlog_count()
    
    return paginator.get_ajax_response(request, extras={"startkey": startkey,
                                                        "endkey": endkey,
                                                        "iTotalRecords": total_records})
Example #10
def get_records_to_process(search_key, batch_size):
    def wrapper(row):
        id = row["id"]
        doc = ExceptionRecord.get(id)
        domain = doc.domain if hasattr(doc, "domain") else ""
        try:
            domain, case_id = domain.split(',')
        except ValueError:
            return {'exception': doc}
        return {
            'domain': domain,
            'case_id': case_id,
            'exception': doc
        }

    search_key = "%s AND NOT archived" % search_key

    paginator = LucenePaginator(SEARCH_VIEW_NAME, wrapper, database=ExceptionRecord.get_db())
    return paginator.get_results(search_key, batch_size, 0)
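
The wrapper in Example #10 recovers a case id from a comma-separated doc.domain value and falls back to just the exception when the split fails. The same parsing in isolation, assuming only that convention (the helper name is mine):

def parse_domain_field(domain):
    """Split a "domain,case_id" value into its two parts; return (domain, None)
    when no case id is embedded, matching the ValueError fallback above.
    (Illustrative helper, not couchlog API.)"""
    try:
        domain, case_id = domain.split(',')
    except ValueError:
        return domain, None
    return domain, case_id


print(parse_domain_field("some-domain,1234abcd"))  # ('some-domain', '1234abcd')
print(parse_domain_field("some-domain"))           # ('some-domain', None)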
Example #11
def purge_old_logs():
    key = datetime.now() - timedelta(weeks=52)
    results = ExceptionRecord.view(
        "couchlog/all_by_date",
        reduce=False,
        startkey=[key.isoformat()],
        descending=True,
        limit=1000,
        include_docs=False)

    db = ExceptionRecord.get_db()
    docs = []
    for result in results:
        docs.append({
            '_id': result['id'],
            '_rev': db.get_rev(result['id']),
            '_deleted': True,
        })

    db.bulk_save(docs, use_uuids=False)
Example #12
    def next_page(self):
        extra = {}
        if self.bookmark:
            extra['bookmark'] = self.bookmark

        result = ExceptionRecord.get_db().search(
            'couchlog/_search/search',
            handler='_design',
            q=self.query,
            include_docs=True,
            limit=self.page_size,
            **extra
        )

        try:
            result.fetch()
            self.bookmark = result._result_cache.get('bookmark')
            return result
        except RequestFailed:
            # ignore for now
            return []
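
Example #12's next_page threads a bookmark between calls and hands back an empty result when the search request fails; carrying a bookmark forward avoids the skip-based paging that the comment in Example #9 calls inefficient. A sketch of how such a paginator is typically drained, with a stub standing in for the real class (everything below is illustrative):

class FakePaginator(object):
    """Test double with the same next_page() contract as the class above:
    a list per call, then an empty list once exhausted. (Not couchlog code.)"""
    def __init__(self, pages):
        self._pages = list(pages)

    def next_page(self):
        return self._pages.pop(0) if self._pages else []


def iter_all_rows(paginator):
    # Keep asking for pages until an empty page signals the end, the same
    # stopping condition dump_logs_to_file uses in Example #5.
    while True:
        page = paginator.next_page()
        if not page:
            return
        for row in page:
            yield row


print(list(iter_all_rows(FakePaginator([[1, 2], [3]]))))  # [1, 2, 3]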
Example #13
def purge_old_logs():
    key = datetime.utcnow() - timedelta(weeks=52)

    db = ExceptionRecord.get_db()

    results = get_results(key)
    while results.count():
        docs = []
        for result in results:
            try:
                rev = db.get_rev(result['id'])
            except ResourceNotFound:
                pass
            else:
                docs.append({
                    '_id': result['id'],
                    '_rev': rev,
                    '_deleted': True,
                })

        db.bulk_save(docs, use_uuids=False)
        results = get_results(key)
Example #14
def setUp(self):
    db = ExceptionRecord.get_db()
    for row in db.view("couchlog/all_by_date").all():
        safe_delete(db, row['id'])
Example #15
def _couchlog_count():
    count_results = ExceptionRecord.get_db().view("couchlog/count").one()
    return count_results["value"] if count_results else 0
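
Example #15 assumes a couchlog/count reduce view whose single reduced row carries the total in "value"; .one() returns None on an empty result set, hence the guard. A guess at what such a design document could look like, written as the Python dict you would save into CouchDB (the real couchlog design doc is not shown here, so both the doc_type check and the view layout are assumptions):

count_design_doc = {
    "_id": "_design/couchlog",
    "views": {
        "count": {
            # Assumed map: emit one row per exception record; the built-in
            # _count reduce collapses them into a single {"value": N} row.
            "map": "function(doc) { if (doc.doc_type == 'ExceptionRecord') { emit(null, 1); } }",
            "reduce": "_count",
        },
    },
}

# With the reduce applied (the default), .one() on this view yields something
# like {"key": None, "value": 42}, which is exactly what _couchlog_count unwraps.
print(count_design_doc["views"]["count"]["reduce"])  # _count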