Example #1
def graphs(request):
    gset = request.GET.get("gset", None)
    gsets = events.raiseDictEvent("GetGraphSets")
    if gset not in gsets:
        raise ModoboaException(_("Unknown graphic set"))
    searchq = request.GET.get("searchquery", None)
    period = request.GET.get("period", "day")
    tplvars = dict(graphs=[], period=period)
    if searchq in [None, "global"]:
        if not request.user.is_superuser:
            if not Domain.objects.get_for_admin(request.user).count():
                return ajax_simple_response({"status": "ok"})
            tplvars.update(
                domain=Domain.objects.get_for_admin(request.user)[0].name
            )
        else:
            tplvars.update(domain="global")
    else:
        domain = Domain.objects.filter(name__contains=searchq)
        if domain.count() != 1:
            return ajax_simple_response({"status": "ok"})
        if not request.user.can_access(domain[0]):
            raise PermDeniedException
        tplvars.update(domain=domain[0].name)

    if period == "custom":
        if not "start" in request.GET or not "end" in request.GET:
            raise ModoboaException(_("Bad custom period"))
        start = request.GET["start"]
        end = request.GET["end"]
        G = Grapher()
        period_name = "%s_%s" % (start.replace('-', ''), end.replace('-', ''))
        for tpl in gsets[gset].get_graphs():
            tplvars['graphs'].append(tpl.display_name)
            G.process(
                tplvars["domain"], period_name, str2Time(*start.split('-')),
                str2Time(*end.split('-')), tpl
            )
        tplvars["period_name"] = period_name
        tplvars["start"] = start
        tplvars["end"] = end
    else:
        tplvars['graphs'] = gsets[gset].get_graph_names()

    return ajax_simple_response(dict(
        status="ok",
        content=_render_to_string(request, "stats/graphs.html", tplvars)
    ))
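
Every example on this page ultimately hands its split date fields to str2Time. A minimal sketch of what such a helper could look like, assuming it simply turns year/month/day/hour/minute/second fields (with the month given either numerically or as an abbreviated name) into a Unix timestamp; this is an illustration only, not the actual modoboa.extensions.stats implementation:

import time

# Hypothetical month-name table; syslog dates carry abbreviated names.
MONTHS = {name: idx + 1 for idx, name in enumerate(
    ["Jan", "Feb", "Mar", "Apr", "May", "Jun",
     "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"])}

def str2Time(year, month, day, hour=0, minute=0, second=0):
    """Hypothetical sketch: build a Unix timestamp from date/time fields."""
    month = MONTHS.get(str(month)) or int(month)
    parsed = time.strptime(
        "%s %s %s %s:%s:%s" % (year, month, day, hour, minute, second),
        "%Y %m %d %H:%M:%S")
    return int(time.mktime(parsed))
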
Example #2
def graphs(request):
    gset = request.GET.get("gset", None)
    gsets = events.raiseDictEvent("GetGraphSets")
    if gset not in gsets:
        raise NotFound(_("Unknown graphic set"))
    searchq = request.GET.get("searchquery", None)
    period = request.GET.get("period", "day")
    tplvars = dict(graphs=[], period=period)
    if searchq in [None, "global"]:
        if not request.user.is_superuser:
            if not Domain.objects.get_for_admin(request.user).count():
                return render_to_json_response({})
            tplvars.update(
                domain=Domain.objects.get_for_admin(request.user)[0].name
            )
        else:
            tplvars.update(domain="global")
    else:
        domain = Domain.objects.filter(name__contains=searchq)
        if domain.count() != 1:
            return render_to_json_response({})
        if not request.user.can_access(domain[0]):
            raise PermDeniedException
        tplvars.update(domain=domain[0].name)

    if period == "custom":
        if not "start" in request.GET or not "end" in request.GET:
            raise BadRequest(_("Bad custom period"))
        start = request.GET["start"]
        end = request.GET["end"]
        G = Grapher()
        expr = re.compile(r'[:\- ]')
        period_name = "%s_%s" % (expr.sub('', start), expr.sub('', end))
        for tpl in gsets[gset].get_graphs():
            tplvars['graphs'].append(tpl.display_name)
            G.process(
                tplvars["domain"], period_name, str2Time(*expr.split(start)),
                str2Time(*expr.split(end)), tpl
            )
        tplvars["period_name"] = period_name
        tplvars["start"] = start
        tplvars["end"] = end
    else:
        tplvars['graphs'] = gsets[gset].get_graph_names()

    return render_to_json_response({
        'content': _render_to_string(request, "stats/graphs.html", tplvars)
    })
Example #3
def graphs(request):
    gset = request.GET.get("gset", None)
    gsets = events.raiseDictEvent("GetGraphSets")
    if gset not in gsets:
        raise NotFound(_("Unknown graphic set"))
    searchq = request.GET.get("searchquery", None)
    period = request.GET.get("period", "day")
    tplvars = dict(graphs=[], period=period)
    if searchq in [None, "global"]:
        if not request.user.is_superuser:
            if not Domain.objects.get_for_admin(request.user).count():
                return render_to_json_response({})
            tplvars.update(
                domain=Domain.objects.get_for_admin(request.user)[0].name)
        else:
            tplvars.update(domain="global")
    else:
        domain = Domain.objects.filter(name__contains=searchq)
        if domain.count() != 1:
            return render_to_json_response({})
        if not request.user.can_access(domain[0]):
            raise PermDeniedException
        tplvars.update(domain=domain[0].name)

    if period == "custom":
        if not "start" in request.GET or not "end" in request.GET:
            raise BadRequest(_("Bad custom period"))
        start = request.GET["start"]
        end = request.GET["end"]
        G = Grapher()
        expr = re.compile(r'[:\- ]')
        period_name = "%s_%s" % (expr.sub('', start), expr.sub('', end))
        for tpl in gsets[gset].get_graphs():
            tplvars['graphs'].append(tpl.display_name)
            G.process(tplvars["domain"], period_name,
                      str2Time(*expr.split(start)), str2Time(*expr.split(end)),
                      tpl)
        tplvars["period_name"] = period_name
        tplvars["start"] = start
        tplvars["end"] = end
    else:
        tplvars['graphs'] = gsets[gset].get_graph_names()

    return render_to_json_response(
        {'content': _render_to_string(request, "stats/graphs.html", tplvars)})
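
Examples #2 and #3 replace the plain start.split('-') of Example #1 with a small regex, so a custom period may also carry a time of day. A quick standalone illustration of that splitter (plain re, no Modoboa dependency; the sample dates are made up):

import re

expr = re.compile(r'[:\- ]')

print(expr.split("2015-06-01"))        # ['2015', '06', '01']
print(expr.split("2015-06-01 12:30"))  # ['2015', '06', '01', '12', '30']

# The same pattern builds the period name by stripping the separators:
print("%s_%s" % (expr.sub('', "2015-06-01"), expr.sub('', "2015-06-02")))
# 20150601_20150602
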
Example #4
    def _parse_date(self, line):
        """Try to match a date inside :kw:`line` and to convert it to
        a timestamp.

        We try different date formats until we find a valid one. We
        then store the matching expression for future use.

        :param str line: a log entry
        :return: the remaining part of the line or None
        """
        match = None
        if self.date_expr is None:
            for expr in [self._s_date_expr, self._hp_date_expr]:
                match = expr.match(line)
                if match is not None:
                    self.date_expr = expr
                    break
        else:
            match = self.date_expr.match(line)
        if match is None:
            return None
        ho = match.group("hour")
        mi = match.group("min")
        se = match.group("sec")
        se = int(int(se) / rrdstep)  # rrd step is one-minute => se = 0
        if self._prev_se != se or self._prev_mi != mi or self._prev_ho != ho:
            mo = match.group("month")
            da = match.group("day")
            try:
                ye = match.group("year")
            except IndexError:
                ye = self.year(mo)
            self.cur_t = str2Time(ye, mo, da, ho, mi, se)
            self.cur_t = self.cur_t - self.cur_t % rrdstep
            self._prev_mi = mi
            self._prev_ho = ho
            self._prev_se = se
        return match.group('eol')
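
_parse_date relies on two pre-compiled expressions, _s_date_expr and _hp_date_expr, that are not shown here. One plausible shape for them, purely as an assumption consistent with the named groups the method reads (month, day, hour, min, sec, an optional year, and eol):

import re

# Assumed traditional syslog prefix, e.g. "Jun  1 12:30:45 ..." (no year,
# hence the IndexError branch above).
_s_date_expr = re.compile(
    r'(?P<month>\w+)\s+(?P<day>\d+)\s+'
    r'(?P<hour>\d+):(?P<min>\d+):(?P<sec>\d+)\s+(?P<eol>.*)')

# Assumed high-precision / RFC 3339 prefix, e.g.
# "2015-06-01T12:30:45.123456+02:00 ...", which does carry a year.
_hp_date_expr = re.compile(
    r'(?P<year>\d{4})-(?P<month>\d{2})-(?P<day>\d{2})T'
    r'(?P<hour>\d+):(?P<min>\d+):(?P<sec>\d+)\.\d+\S+\s+(?P<eol>.*)')
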
Example #5
    def process(self):
        id_expr = re.compile("([0-9A-F]+): (.*)")
        prev_se = -1
        prev_mi = -1
        prev_ho = -1
        for line in self.f.readlines():
            m = self.line_expr.match(line)
            if not m:
                continue
            (mo, da, ho, mi, se, host, prog, pid, log) = m.groups()
            se = int(int(se) / rrdstep)  # rrd step is one-minute => se = 0

            if prev_se != se or prev_mi != mi or prev_ho != ho:
                cur_t = str2Time(self.year(mo), mo, da, ho, mi, se)
                cur_t = cur_t - cur_t % rrdstep
                prev_mi = mi
                prev_ho = ho
                prev_se = se
            m = id_expr.match(log)
            if m:
                (line_id, line_log) = m.groups()

                m = re.search("message-id=<([^>]*)>", line_log)
                if m:
                    self.workdict[line_id] = {'from': m.group(1), 'size': 0}
                    continue

                m = re.search("from=<([^>]*)>, size=(\d+)", line_log)
                if m:
                    self.workdict[line_id] = {'from': m.group(1),
                                              'size': string.atoi(m.group(2))}
                    continue

                m = re.search("to=<([^>]*)>.*status=(\S+)", line_log)
                if m:
                    if line_id not in self.workdict:
                        if self.debug:
                            print "Inconsistent mail (%s: %s), skipping" % (line_id, m.group(1))
                        continue
                    if m.group(2) not in variables:
                        if self.debug:
                            print "Unsupported status %s, skipping" % m.group(2)
                        continue

                    addrfrom = re.match("([^@]+)@(.+)", self.workdict[line_id]['from'])
                    if addrfrom is not None and addrfrom.group(2) in self.domains:
                        self.inc_counter(addrfrom.group(2), cur_t, 'sent')
                        self.inc_counter(addrfrom.group(2), cur_t, 'size_sent',
                                         self.workdict[line_id]['size'])
                    addrto = re.match("([^@]+)@(.+)", m.group(1))
                    domname = addrto.group(2) if addrto is not None else None
                    if m.group(2) == "sent":
                        self.inc_counter(addrto.group(2), cur_t, 'recv')
                        self.inc_counter(addrto.group(2), cur_t, 'size_recv',
                                         self.workdict[line_id]['size'])
                    else:
                        self.inc_counter(domname, cur_t, m.group(2))
                    continue

                if self.debug:
                    print "Unknown line format: %s" % line_log
            else:
                m = re.match("NOQUEUE: reject: .*from=<(.*)> to=<([^>]*)>", log)
                if m:
                    addrto = re.match("([^@]+)@(.+)", m.group(2))
                    if addrto and addrto.group(2) in self.domains:
                        self.inc_counter(addrto.group(2), cur_t, 'reject')
                    continue
                if self.debug:
                    print "Unknown line format: %s" % log

        # Sort everything by time
        G = Grapher()
        for dom, data in self.data.iteritems():
            if self.debug:
                print "[rrd] dealing with domain %s" % dom
            for t in sorted(data.keys()):
                self.update_rrd(dom, t)

            for graph_tpl in MailTraffic().get_graphs():
                G.make_defaults(dom, graph_tpl)
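
process() unpacks nine groups from self.line_expr, which is defined elsewhere. A hypothetical pattern with the same group layout, matched against a classic Postfix syslog line (both the pattern and the sample line are illustrative assumptions, not the real attribute):

import re

line_expr = re.compile(
    r'(\w+)\s+(\d+)\s+(\d+):(\d+):(\d+)\s+(\S+)\s+(\S+?)\[(\d+)\]:\s+(.*)')

sample = "Jun  1 12:30:45 mail postfix/smtpd[1234]: 3A1B2C: client=example.org"
print(line_expr.match(sample).groups())
# ('Jun', '1', '12', '30', '45', 'mail', 'postfix/smtpd', '1234',
#  '3A1B2C: client=example.org')
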
Example #6
    def process(self):
        id_expr = re.compile("([0-9A-F]+): (.*)")
        prev_se = -1
        prev_mi = -1
        prev_ho = -1
        for line in self.f.readlines():
            m = self.line_expr.match(line)
            if not m:
                continue
            (mo, da, ho, mi, se, host, prog, pid, log) = m.groups()
            se = int(int(se) / rrdstep)  # rrd step is one-minute => se = 0

            if prev_se != se or prev_mi != mi or prev_ho != ho:
                cur_t = str2Time(self.year(mo), mo, da, ho, mi, se)
                cur_t = cur_t - cur_t % rrdstep
                prev_mi = mi
                prev_ho = ho
                prev_se = se
            m = id_expr.match(log)
            if m:
                (line_id, line_log) = m.groups()

                m = re.search("message-id=<([^>]*)>", line_log)
                if m:
                    self.workdict[line_id] = {"from": m.group(1), "size": 0}
                    continue

                m = re.search("from=<([^>]*)>, size=(\d+)", line_log)
                if m:
                    self.workdict[line_id] = {"from": m.group(1), "size": string.atoi(m.group(2))}
                    continue

                m = re.search("to=<([^>]*)>.*status=(\S+)", line_log)
                if m:
                    if line_id not in self.workdict:
                        if self.debug:
                            print "Inconsistent mail (%s: %s), skipping" % (line_id, m.group(1))
                        continue
                    if m.group(2) not in variables:
                        if self.debug:
                            print "Unsupported status %s, skipping" % m.group(2)
                        continue

                    addrfrom = re.match("([^@]+)@(.+)", self.workdict[line_id]["from"])
                    if addrfrom is not None and addrfrom.group(2) in self.domains:
                        self.inc_counter(addrfrom.group(2), cur_t, "sent")
                        self.inc_counter(addrfrom.group(2), cur_t, "size_sent", self.workdict[line_id]["size"])
                    addrto = re.match("([^@]+)@(.+)", m.group(1))
                    domname = addrto.group(2) if addrto is not None else None
                    if m.group(2) == "sent":
                        self.inc_counter(addrto.group(2), cur_t, "recv")
                        self.inc_counter(addrto.group(2), cur_t, "size_recv", self.workdict[line_id]["size"])
                    else:
                        self.inc_counter(domname, cur_t, m.group(2))
                    continue

                if self.debug:
                    print "Unknown line format: %s" % line_log
            else:
                m = re.match("NOQUEUE: reject: .*from=<(.*)> to=<([^>]*)>", log)
                if m:
                    addrto = re.match("([^@]+)@(.+)", m.group(2))
                    if addrto and addrto.group(2) in self.domains:
                        self.inc_counter(addrto.group(2), cur_t, "reject")
                    continue
                if self.debug:
                    print "Unknown line format: %s" % log

        # Sort everything by time
        G = Grapher()
        for dom, data in self.data.iteritems():
            if self.debug:
                print "[rrd] dealing with domain %s" % dom
            for t in sorted(data.keys()):
                self.update_rrd(dom, t)

            for graph_tpl in MailTraffic().get_graphs():
                G.make_defaults(dom, graph_tpl)
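
The per-recipient and reject branches above hinge on two small regexes. A standalone illustration against sample Postfix log payloads (the payload strings are made up for the demo):

import re

payload = "to=<bob@example.org>, relay=local, delay=0.1, status=sent (delivered)"
m = re.search(r"to=<([^>]*)>.*status=(\S+)", payload)
print((m.group(1), m.group(2)))   # ('bob@example.org', 'sent')

reject = ("NOQUEUE: reject: RCPT from unknown[10.0.0.1]: 554 relay denied; "
          "from=<spam@bad.tld> to=<bob@example.org> proto=ESMTP")
m = re.match(r"NOQUEUE: reject: .*from=<(.*)> to=<([^>]*)>", reject)
print(m.group(2))                 # bob@example.org
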