Example #1
def compute_times(target, tables, criteria, params):
    tzname = criteria.business_hours_tzname
    logger.debug("timezone: %s" % tzname)
    tz = pytz.timezone(tzname)

    # Convert to datetime objects in the requested timezone
    st = criteria.starttime.astimezone(tz)
    et = criteria.endtime.astimezone(tz)
    logger.debug("times: %s - %s" % (st, et))

    # Business hours start/end, as string "HH:MMam" like 8:00am
    sb = parse_time(criteria.business_hours_start)
    eb = parse_time(criteria.business_hours_end)

    weekends = criteria.business_hours_weekends

    # Iterate from st to et one day at a time, collecting business-hours windows
    times = []
    t = st
    while t <= et:
        # Set t0/t1 to date of t but time of sb/eb
        t0_t = replace_time(t, sb)
        t1_t = replace_time(t, eb)

        # Advance t by 1 day
        t = t + datetime.timedelta(days=1)

        # Skip weekends
        if not weekends and t0_t.weekday() >= 5:
            continue

        # Now see if we have any overlap of business hours for today
        if et < t0_t:
            # Report end time is today before business hours start, all done
            break

        if et < t1_t:
            # Report end time is today in the middle of business hours, adjust
            t1_t = et

        if t1_t < st:
            # Report start time occurs today *after* business end, nothing today
            continue

        if t0_t < st:
            # Report start time occurs today in the middle of the business hours
            # Adjust t0
            t0_t = st

        t0 = datetime_to_seconds(t0_t)
        t1 = datetime_to_seconds(t1_t)

        #logger.debug("  START: %s  END: %s" % (str(t0_t), str(t1_t)))
        times.append([t0 * 1000, t1 * 1000, t1 - t0])

    if len(times) == 0:
        return None
    else:
        return pandas.DataFrame(times,
                                columns=['starttime', 'endtime', 'totalsecs'])
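
The example relies on helpers defined elsewhere in its module (`parse_time`, `replace_time`, `datetime_to_seconds`). A minimal sketch of plausible implementations, stated purely as an assumption so the snippet can be read on its own:

import calendar
import datetime

def parse_time(s):
    # Assumed helper: parse a string such as "8:00am" into a datetime.time
    return datetime.datetime.strptime(s.strip().lower(), '%I:%M%p').time()

def replace_time(dt, t):
    # Assumed helper: keep the date (and tzinfo) of dt, swap in the time t
    return dt.replace(hour=t.hour, minute=t.minute, second=0, microsecond=0)

def datetime_to_seconds(dt):
    # Assumed helper: timezone-aware datetime -> seconds since the UNIX epoch
    return calendar.timegm(dt.utctimetuple())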
Example #3
def analysis_generate_data(query, tables, criteria, params):
    t0 = datetime_to_seconds(criteria.starttime)
    t1 = datetime_to_seconds(criteria.endtime)

    data = []
    for t in range(t0, t1, params['source_resolution']):
        data.append([t, 1])

    df = pandas.DataFrame(data, columns=['time', 'value'])
    df['time'] = pandas.DatetimeIndex(df['time'] * 1000000000)
    return df
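
The multiplication by 1000000000 works because `pandas.DatetimeIndex` treats plain integers as nanoseconds since the epoch. An equivalent and arguably clearer spelling, shown only as an illustration, converts the epoch seconds directly with `pandas.to_datetime`:

import pandas

# Illustration only: epoch seconds -> datetime64[ns] column
df = pandas.DataFrame({'time': [1700000000, 1700000060], 'value': [1, 1]})
df['time'] = pandas.to_datetime(df['time'], unit='s')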
Example #5
    def send_trap(self):
        """ Send a SNMP trap with id `trapid` to the IP address `manager`
        """
        oid = self.options.eoid             # cascade enterprise Object ID
        trapid = self.options.trapid        # base string for trap indicators

        community = self.options.community
        manager_ip = self.options.manager_ip

        severity = self.options.severity
        description = self.trap_description
        url = self.options.trap_url
        alert_level = self.options.alert_level
        now = timeutils.datetime_to_seconds(datetime.datetime.now())

        trapname = '.'.join([oid, trapid])

        ntf = ntforg.NotificationOriginator()

        err = ntf.sendNotification(ntforg.CommunityData(community),
                                   ntforg.UdpTransportTarget((manager_ip, 162)),
                                   'trap',
                                   trapname,
                                   ('1.3.6.1.2.1.1.3.0', rfc1902.Integer(0)),                    # Uptime
                                   ('1.3.6.1.4.1.7054.71.2.1.0', rfc1902.Integer(severity)),     # Severity
                                   ('1.3.6.1.4.1.7054.71.2.3.0', rfc1902.OctetString(description)),
                                   ('1.3.6.1.4.1.7054.71.2.4.0', rfc1902.Integer(0)),            # Event ID
                                   ('1.3.6.1.4.1.7054.71.2.5.0', rfc1902.OctetString(url)),
                                   ('1.3.6.1.4.1.7054.71.2.7.0', rfc1902.Integer(alert_level)),  # Alert Level
                                   ('1.3.6.1.4.1.7054.71.2.8.0', rfc1902.Integer(now)),          # Start Time
                                   ('1.3.6.1.4.1.7054.71.2.16.0', rfc1902.Integer(0)),           # Source Count
                                   ('1.3.6.1.4.1.7054.71.2.18.0', rfc1902.Integer(0)),           # Destination Count
                                   ('1.3.6.1.4.1.7054.71.2.20.0', rfc1902.Integer(0)),           # Protocol Count
                                   ('1.3.6.1.4.1.7054.71.2.22.0', rfc1902.Integer(0)))           # Port Count
Example #6
    def process(cls, widget, job, data):
        newdata = []
        for row in data:
            newrow = []
            for col in row:
                if isinstance(col, datetime.datetime):
                    col = datetime_to_seconds(col)
                newrow.append(col)
            newdata.append(newrow)

        return newdata
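
`process` only looks at the `data` argument, so the datetime-to-seconds conversion can be exercised with placeholders for `widget` and `job`. A hypothetical call (the class name `TableProcessor` is invented for illustration):

import datetime

# Hypothetical class name; only the process() classmethod shown above is assumed
rows = [[datetime.datetime(2024, 1, 1, 12, 0, 0), 42],
        ['not-a-datetime', 7]]
print(TableProcessor.process(None, None, rows))
# -> [[1704110400, 42], ['not-a-datetime', 7]]   (assuming a UTC-based datetime_to_seconds)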
Example #7
def create_debug_zipfile(no_summary=False):
    """ Collects logfiles and system info into a zipfile for download/email

        `no_summary` indicates whether to include system information from
                     the helper script `flyscript_about.py` as part of the
                     zipped package.  Default is to include the file.
    """
    # setup correct timezone based on admin settings
    admin = User.objects.filter(is_superuser=True)[0]
    tz = pytz.timezone(admin.userprofile.timezone)
    current_tz = os.environ['TZ']

    try:
        # save TZ to environment for zip to use
        os.environ['TZ'] = str(tz)

        # if zlib is available, then let's compress the files
        # otherwise we will just append them like a tarball
        try:
            import zlib
            compression = zipfile.ZIP_DEFLATED
        except ImportError:
            compression = zipfile.ZIP_STORED

        # setup the name, correct timezone, and open the zipfile
        now = datetime_to_seconds(datetime.now(tz))
        archive_name = os.path.join(settings.PROJECT_ROOT,
                                    'debug-%d.zip' % now)

        myzip = zipfile.ZipFile(archive_name, 'w', compression=compression)

        try:
            # find all of the usual logfiles
            filelist = glob.glob(os.path.join(settings.PROJECT_ROOT, 'log*'))

            logging.debug('zipping log files ...')
            for fname in filelist:
                debug_fileinfo(fname)
                myzip.write(fname)

            if not no_summary:
                logging.debug('running about script')
                response = '\n'.join(system_info())
                logging.debug('zipping about script')
                myzip.writestr('system_summary.txt', response)
        finally:
            myzip.close()

    finally:
        # return env to its prior state
        os.environ['TZ'] = current_tz

    return archive_name
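
The archive name embeds `datetime_to_seconds(datetime.now(tz))`, so the filename carries its creation time as integer epoch seconds. Recovering the timestamp is a one-liner; the name below is invented for illustration:

import datetime

archive_name = 'debug-1700000000.zip'  # invented example name
epoch = int(archive_name.split('-')[1].split('.')[0])
print(datetime.datetime.fromtimestamp(epoch, tz=datetime.timezone.utc))
# 2023-11-14 22:13:20+00:00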
Example #9
    def handle(self, *args, **options):
        """ Main command handler
        """
        self.options = options

        if options['table_list']:
            # print out the id's instead of processing anything
            tables = Table.objects.all()
            for t in tables:
                self.console('%5d - %s' % (t.id, t))
        elif options['table_list_by_report']:
            # or print them out organized by report/widget/table
            output = []
            reports = Report.objects.all()
            for report in reports:
                for widget in report.widget_set.all():
                    for table in widget.tables.all():
                        line = [table.id, report.title, widget.title, table]
                        output.append(line)
            Formatter.print_table(output, ['ID', 'Report', 'Widget', 'Table'])
        else:
            if 'table_id' in options and options['table_id'] is not None:
                table = Table.objects.get(id=options['table_id'])
            elif 'table_name' in options:
                table = Table.objects.get(name=options['table_name'])
            else:
                raise ValueError("Must specify either --table-id or --table-name to run a table")
                
            # Django gives us a nice error if we can't find the table
            self.console('Table %s found.' % table)

            # Look for a related report
            widgets = Widget.objects.filter(tables__in=[table])
            if len(widgets) > 0:
                report = widgets[0].report
                form = create_report_criteria_form(report=report)
            else:
                form = None

            add_options = {}
            if 'criteria' in options and options['criteria'] is not None:
                for s in options['criteria']:
                    (k,v) = s.split(':', 1)
                    add_options[k] = v

            if 'endtime' in options and options['endtime'] is not None:
                try:
                    endtime = form.fields['endtime'].clean(options['endtime'])
                except ValidationError:
                    raise ValidationError("Could not parse endtime: %s, try MM/DD/YYYY HH:MM am" % options['endtime'])
                tz = pytz.timezone("US/Eastern")
                endtime = endtime.replace(tzinfo=tz)
            else:
                endtime = datetime.datetime.now()

            criteria = Criteria(endtime=datetime_to_seconds(endtime),
                                duration=options['duration'],
                                filterexpr=options['filterexpr'],
                                table=table,
                                ignore_cache=options['ignore_cache'])


            if form:
                for k,field in form.fields.iteritems():
                    if not k.startswith('criteria_'): continue

                    tc = TableCriteria.objects.get(pk=k.split('_')[1])

                    if (options['criteria'] is not None and
                            tc.keyword in add_options):
                        val = add_options[tc.keyword]
                    else:
                        val = field.initial

                    # handle table criteria and generate children objects
                    tc = TableCriteria.get_instance(k, val) 
                    criteria[k] = tc
                    for child in tc.children.all():
                        child.value = val
                        criteria['criteria_%d' % child.id] = child

            columns = [c.name for c in table.get_columns()]

            if options['only_columns']:
                print columns
                return

            job = Job.create(table=table, criteria=criteria)
            job.save()

            self.console('Job created: %s' % job)
            self.console('Criteria: %s' % criteria.print_details())

            start_time = datetime.datetime.now()
            job.start()
            self.console('Job running . . ', ending='')

            # wait for results
            while not job.done():
                #self.console('. ', ending='')
                #self.stdout.flush()
                time.sleep(1)

            end_time = datetime.datetime.now()
            delta = end_time - start_time
            seconds = float(delta.microseconds + 
                            (delta.seconds + delta.days * 24 * 3600) * 10**6) / 10**6

            self.console('Done!! (elapsed time: %.2f seconds)' % seconds)
            self.console('')

            # Need to refresh the column list in case the job changed them (ephemeral cols)
            columns = [c.name for c in table.get_columns()]

            if job.status == job.COMPLETE:
                if options['as_csv']:
                    Formatter.print_csv(job.values(), columns)
                else:
                    Formatter.print_table(job.values(), columns)
            else:
                self.console("Job completed with an error:")
                self.console(job.message)
                sys.exit(1)
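
The elapsed-time arithmetic above is the classic pre-Python-2.7 expansion of `timedelta.total_seconds()`; on any modern interpreter the same value is available directly:

import datetime

start_time = datetime.datetime.now()
# ... run the job ...
delta = datetime.datetime.now() - start_time

# Equivalent to: float(delta.microseconds +
#                      (delta.seconds + delta.days * 24 * 3600) * 10**6) / 10**6
seconds = delta.total_seconds()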
Example #10
    def run(
        self, template_id, timefilter=None, resolution="auto", query=None, trafficexpr=None, data_filter=None, sync=True
    ):
        """Create the report on Profiler and begin running
        the report.  If the `sync` option is True, periodically
        poll until the report is complete, otherwise return
        immediately.

        `template_id` is the numeric id of the template to use for the report

        `timefilter` is the range of time to query, a TimeFilter object
        
        `resolution` is the data resolution (1min, 15min, etc.), defaults to 'auto'

        `query` is the query object containing criteria

        `trafficexpr` is a TrafficFilter object

        `data_filter` is a deprecated filter to run against report data

        `sync` if True, poll for status until the report is complete
        """

        self.template_id = template_id
        self.custom_columns = False
        if self.template_id != 184:
            # the columns in this report won't match, use custom columns instead
            self.custom_columns = True

        if timefilter is None:
            self.timefilter = TimeFilter.parse_range("last 5 min")
        else:
            self.timefilter = timefilter
        self.query = query
        self.trafficexpr = trafficexpr

        self.data_filter = data_filter

        self.id = None
        self.queries = list()
        self.last_status = None

        if resolution not in ["auto", "1min", "15min", "hour", "6hour", "day", "week", "month"]:
            rd = parse_timedelta(resolution)
            resolution = self.RESOLUTION_MAP[int(timedelta_total_seconds(rd))]

        self.resolution = resolution

        start = datetime_to_seconds(self.timefilter.start)
        end = datetime_to_seconds(self.timefilter.end)

        # using a RecursiveUpdateDict
        criteria = RecursiveUpdateDict(**{"time_frame": {"start": int(start), "end": int(end)}})

        if self.query is not None:
            criteria["query"] = self.query

        if self.resolution != "auto":
            criteria["time_frame"]["resolution"] = self.resolution

        if self.data_filter:
            criteria["deprecated"] = {self.data_filter[0]: self.data_filter[1]}

        if self.trafficexpr is not None:
            criteria["traffic_expression"] = self.trafficexpr.filter

        to_post = {"template_id": self.template_id, "criteria": criteria}

        logger.debug("Posting JSON: %s" % to_post)

        response = self.profiler.api.report.reports(data=to_post)

        try:
            self.id = int(response["id"])
        except KeyError:
            raise ValueError("failed to retrieve report id from report creation response: %s" % response)

        logger.info("Created report %d" % self.id)

        if sync:
            self.wait_for_complete()
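
For reference, the body posted to `self.profiler.api.report.reports` ends up shaped roughly as follows; the values are invented for illustration and only the keys set by the code above appear:

to_post = {
    "template_id": 184,
    "criteria": {
        "time_frame": {
            "start": 1700000000,   # int(datetime_to_seconds(timefilter.start))
            "end": 1700000300,     # int(datetime_to_seconds(timefilter.end))
            "resolution": "1min",  # present only when resolution != "auto"
        },
        # "query": <query criteria object>,                present only when query is given
        # "traffic_expression": trafficexpr.filter,        present only when trafficexpr is given
        # "deprecated": {data_filter[0]: data_filter[1]},  present only when data_filter is set
    },
}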