Example #1
    def parse_time(self, time, opt, tz):
        if not self._parse_time:
            # no time parser selected yet: determine one from this sample, then parse
            self._find_time(time)
            time = self._parse_time(time, opt, tz)
            # xxx default timezone?
            #global t0
            #t0 = t0.astimezone(time.tzinfo)
        else:
            time = self._parse_time(time, opt, tz)

        # convert to internal fp repr
        if time.tzinfo is None:
            if tz is None:
                msg = "no timezone for %s; specify input timezone, e.g., --itz=-5" % time
                raise Exception(msg)
            else:
                tzinfo = dateutil.tz.tzoffset('xxx', tz.total_seconds())
                time = time.replace(tzinfo=tzinfo)
        time = util.t2f(time)
    
        # done
        return time
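
A minimal standalone sketch of the timezone fallback above, assuming tz arrives as a datetime.timedelta (e.g. timedelta(hours=-5) for --itz=-5) and that util.t2f produces a float epoch timestamp; attach_tz_and_to_float and the use of datetime.timestamp() are illustrative stand-ins, not part of the original code.

import datetime as dt
import dateutil.tz

def attach_tz_and_to_float(time, tz):
    # naive datetime: fall back to the offset supplied on the command line
    if time.tzinfo is None:
        if tz is None:
            raise Exception('no timezone for %s; specify input timezone, e.g., --itz=-5' % time)
        time = time.replace(tzinfo=dateutil.tz.tzoffset(None, tz.total_seconds()))
    return time.timestamp()  # stand-in for util.t2f's internal float repr

# example: a naive timestamp interpreted as UTC-5
print(attach_tz_and_to_float(dt.datetime(2024, 1, 1, 12, 0), dt.timedelta(hours=-5)))
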
Example #2
def _get_graphs(ses):

    opt = ses.opt

    if not hasattr(opt, 'after') or not opt.after: opt.after = float('-inf')
    if not hasattr(opt, 'before') or not opt.before: opt.before = float('inf')
    if not hasattr(opt, 'every'): opt.every = 0
    if type(opt.after) == str: opt.after = util.datetime_parse(opt.after)
    if type(opt.before) == str: opt.before = util.datetime_parse(opt.before)
    if type(opt.after) == dt.datetime: opt.after = util.t2f(opt.after)
    if type(opt.before) == dt.datetime: opt.before = util.t2f(opt.before)

    # generate descriptors by sniffing for specs that don't have it
    specs = descriptors.sniff(ses, *opt.specs)

    # parse specs, group them by file and parser
    ses.series = []  # all
    opt.fns = collections.defaultdict(list)  # grouped by fn
    for spec_ord, spec in enumerate(specs):
        try:
            for s in graphing.get_series(ses, spec, spec_ord):
                opt.fns[(s.fn,
                         s.parser)].append(s)  # xxx canonicalize filename
                ses.series.append(s)
        except Exception as e:
            # xxx should we raise exception and so abort, or carry on processing all we can?
            traceback.print_exc()
            raise Exception('error processing %s: %s' % (spec, e))
    graphing.finish(ses.series)

    # process by file according to parser
    for fn, parser in sorted(opt.fns):
        opt.last_time = -float('inf')
        process.parse_and_process(ses, fn, opt.fns[(fn, parser)], opt, parser)

    # finish each series
    for s in ses.series:
        s.finish()

    # sort them
    ses.series.sort(key=lambda s: s.sort_ord)

    # get graphs taking into account splits and merges
    graphs = collections.defaultdict(Graph)
    ygroups = collections.defaultdict(list)
    for s in ses.series:
        s.get_graphs(graphs, ygroups, opt)

    # compute display_ymax taking into account spec_ymax and ygroup
    for g in graphs.values():
        for s in g:
            s.display_ymax = max(s.ymax, s.spec_ymax)
    for ygroup in ygroups.values():
        ygroup_ymax = max(s.ymax for s in ygroup)
        for s in ygroup:
            s.display_ymax = max(s.display_ymax, ygroup_ymax)

    # our result
    ses.graphs = graphs.values()

    # finish if no data
    if not ses.graphs:
        ses.progress('no data found')
        return

    # compute time ranges (tmin and tmax are in the internal float representation, seconds)
    opt.tmin = min(s.tmin for g in graphs.values() for s in g if s.tmin)
    opt.tmax = max(s.tmax for g in graphs.values() for s in g if s.tmax)

    # duration parameter (in seconds) overrides the computed tmax
    if opt.duration:
        opt.tmax = opt.tmin + opt.duration

    opt.tspan = opt.tmax - opt.tmin

    # compute left and right edges of graphing area
    graphing.get_time_bounds(opt)

    # show times
    start_time = util.f2t(opt.tmin).strftime('%Y-%m-%d %H:%M:%SZ')
    finish_time = util.f2t(opt.tmax).strftime('%Y-%m-%d %H:%M:%SZ')
    ses.advise(
        'start: %s, finish: %s, duration: %s' %
        (start_time, finish_time, util.f2t(opt.tmax) - util.f2t(opt.tmin)))

    # compute ticks
    ranges = [1, 2.5, 5, 10, 15, 20, 30, 60]  # seconds
    ranges += [r * 60 for r in ranges]  # minutes
    ranges += [r * 3600 for r in (1, 2, 3, 4, 6, 8, 12, 24)]  # hours
    nticks = int(opt.width / 5)
    if nticks < 1: nticks = 1
    tickdelta = opt.tspan / nticks
    for r in ranges:
        if tickdelta < r:
            tickdelta = r
            break
    # long duration (multiple days): no candidate spacing was large enough,
    # so make tickdelta an exact number of days
    if tickdelta != r:
        tickdelta = math.ceil(tickdelta / (24 * 3600)) * (24 * 3600)
    slop = 0.1  # gives us ticks near beginning or end if those aren't perfectly second-aligned
    tickmin = math.ceil((opt.tmin - slop) / tickdelta) * tickdelta
    opt.ticks = []
    for i in range(nticks + 1):
        t = tickmin + i * tickdelta
        if t > opt.tmax + slop: break
        opt.ticks.append(t)
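
The tick computation above is self-contained enough to lift out. A hedged sketch of the same approach as a standalone function (the name compute_ticks and the sample call are illustrative; the for/else replaces the tickdelta != r check):

import math

def compute_ticks(tmin, tmax, width):
    # candidate tick spacings
    ranges = [1, 2.5, 5, 10, 15, 20, 30, 60]                   # seconds
    ranges += [r * 60 for r in ranges]                         # minutes
    ranges += [r * 3600 for r in (1, 2, 3, 4, 6, 8, 12, 24)]   # hours
    nticks = max(int(width / 5), 1)
    tickdelta = (tmax - tmin) / nticks
    for r in ranges:
        if tickdelta < r:
            tickdelta = r
            break
    else:
        # multi-day span: no candidate was large enough, round up to whole days
        tickdelta = math.ceil(tickdelta / (24 * 3600)) * (24 * 3600)
    slop = 0.1  # tolerate ticks slightly outside [tmin, tmax]
    tickmin = math.ceil((tmin - slop) / tickdelta) * tickdelta
    ticks = []
    for i in range(nticks + 1):
        t = tickmin + i * tickdelta
        if t > tmax + slop:
            break
        ticks.append(t)
    return ticks

print(compute_ticks(0, 3600, 100))  # one hour at width 100 -> a tick every 5 minutes
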