def get_endpoint_by_name(db_session, endpoint_name):
    """
    Returns the Endpoint object from a given endpoint_name.
    If the result doesn't exist in the database, a new row is added.

    :param db_session: session for the database
    :param endpoint_name: string with the endpoint name
    :return Endpoint object
    """
    query = db_session.query(Endpoint).filter(Endpoint.name == endpoint_name)
    try:
        endpoint = query.one()
    except NoResultFound:
        # Not seen before: insert a fresh row and flush so it gets an id.
        endpoint = Endpoint(name=endpoint_name)
        db_session.add(endpoint)
        db_session.flush()
    else:
        # Existing row: present timestamps in local time.
        endpoint.time_added = to_local_datetime(endpoint.time_added)
        endpoint.last_requested = to_local_datetime(endpoint.last_requested)
    # Detach from the session so the object stays usable after it closes.
    db_session.expunge(endpoint)
    return endpoint
def overview():
    """
    Render the dashboard overview page.

    Collects, for every monitored endpoint, the hit counts and median request
    durations for today, the past week, and overall, plus the last-accessed
    time, and renders them into 'fmd_dashboard/overview.html'.

    :return: rendered overview template
    """
    # numpy is imported lazily so the dashboard works without it until needed.
    from numpy import median

    week_ago = datetime.datetime.utcnow() - datetime.timedelta(days=7)
    now_local = to_local_datetime(datetime.datetime.utcnow())
    # Start of the local day, expressed in UTC for database comparisons.
    today_local = now_local.replace(hour=0, minute=0, second=0, microsecond=0)
    today_utc = to_utc_datetime(today_local)

    result = []
    with session_scope() as db_session:
        hits_today = count_requests_group(db_session, Request.time_requested > today_utc)
        hits_week = count_requests_group(db_session, Request.time_requested > week_ago)
        hits = count_requests_group(db_session)

        median_today = get_endpoint_data_grouped(
            db_session, median, Request.time_requested > today_utc)
        median_week = get_endpoint_data_grouped(
            db_session, median, Request.time_requested > week_ago)
        # FIX: use a distinct name instead of rebinding `median`, which used to
        # shadow the imported numpy function with its own query result.
        median_overall = get_endpoint_data_grouped(db_session, median)
        access_times = get_last_requested(db_session)

        for endpoint in get_endpoints(db_session):
            result.append({
                'id': endpoint.id,
                'name': endpoint.name,
                'color': get_color(endpoint.name),
                'hits-today': get_value(hits_today, endpoint.id),
                'hits-week': get_value(hits_week, endpoint.id),
                'hits-overall': get_value(hits, endpoint.id),
                'median-today': get_value(median_today, endpoint.id),
                'median-week': get_value(median_week, endpoint.id),
                'median-overall': get_value(median_overall, endpoint.id),
                'last-accessed': get_value(access_times, endpoint.name, default=None)
            })
        version = get_details(db_session)['dashboard-version']
    return render_template('fmd_dashboard/overview.html', result=result,
                           is_admin=is_admin(), title='Dashboard Overview',
                           version=version)
def hourly_load_graph(form, endpoint_id=None): """ Return HTML string for generating a Heatmap. :param form: A SelectDateRangeForm, which is used to filter the selection :param endpoint_id: optionally, filter the data on a specific endpoint :return: HTML code with the graph """ # list of hours: 0:00 - 23:00 hours = ['0{}:00'.format(h) for h in range(0, 10) ] + ['{}:00'.format(h) for h in range(10, 24)] days = form.get_days() # create empty 2D-list: [hour][day] heatmap_data = numpy.zeros((len(hours), len(days))) # add data from database to heatmap_data start_datetime = to_utc_datetime( datetime.datetime.combine(form.start_date.data, datetime.time(0, 0, 0, 0))) end_datetime = to_utc_datetime( datetime.datetime.combine(form.end_date.data, datetime.time(23, 59, 59))) with session_scope() as db_session: for time, count in get_num_requests(db_session, endpoint_id, start_datetime, end_datetime): parsed_time = datetime.datetime.strptime(time, '%Y-%m-%d %H:%M:%S') day_index = (parsed_time - start_datetime).days hour_index = int(to_local_datetime(parsed_time).strftime('%H')) heatmap_data[hour_index][day_index] = count start_datetime = to_local_datetime(start_datetime - datetime.timedelta( days=1)).strftime('%Y-%m-%d 12:00:00') end_datetime = to_local_datetime( form.end_date.data).strftime('%Y-%m-%d 12:00:00') layout = get_layout(xaxis=go.XAxis(range=[start_datetime, end_datetime])) return get_figure(layout, [plot_heatmap(x=days, y=hours, z=heatmap_data)])
def get_endpoint_overview(session):
    """
    Collect overview statistics for every endpoint in the database: hit
    counts (today / past week / overall, with error counts), median request
    durations for the same periods, and the last-accessed time.

    :param session: session for the database
    :return: A list of properties for each endpoint that is found in the database
    """
    week_ago = datetime.datetime.utcnow() - datetime.timedelta(days=7)
    now_local = to_local_datetime(datetime.datetime.utcnow())
    today_local = now_local.replace(hour=0, minute=0, second=0, microsecond=0)
    today_utc = to_utc_datetime(today_local)

    # First flush last requested info to db
    cache.flush_cache()

    # Shared filter expressions, built once and reused across the queries.
    is_error = and_(Request.status_code >= 400, Request.status_code < 600)
    in_today = Request.time_requested > today_utc
    in_week = Request.time_requested > week_ago

    hits_today = count_requests_group(session, in_today)
    hits_today_errors = count_requests_group(session, and_(in_today, is_error))
    hits_week = count_requests_group(session, in_week)
    hits_week_errors = count_requests_group(session, and_(in_week, is_error))
    hits = count_requests_group(session)

    median_today = get_endpoint_data_grouped(session, median, in_today)
    median_week = get_endpoint_data_grouped(session, median, in_week)
    median_overall = get_endpoint_data_grouped(session, median)
    access_times = get_last_requested(session)

    overview = []
    for endpoint in get_endpoints(session):
        overview.append({
            'id': endpoint.id,
            'name': endpoint.name,
            'monitor': endpoint.monitor_level,
            'color': get_color(endpoint.name),
            'hits-today': get_value(hits_today, endpoint.id),
            'hits-today-errors': get_value(hits_today_errors, endpoint.id),
            'hits-week': get_value(hits_week, endpoint.id),
            'hits-week-errors': get_value(hits_week_errors, endpoint.id),
            'hits-overall': get_value(hits, endpoint.id),
            'median-today': get_value(median_today, endpoint.id),
            'median-week': get_value(median_week, endpoint.id),
            'median-overall': get_value(median_overall, endpoint.id),
            'last-accessed': get_value(access_times, endpoint.name, default=None),
        })
    return overview
def get_endpoint_details(db_session, endpoint_id):
    """
    Returns details about an endpoint.

    :param db_session: session for the database
    :param endpoint_id: id of the endpoint
    :return dictionary
    """
    endpoint = get_endpoint_by_id(db_session, endpoint_id)
    # Present the creation time in the local timezone.
    endpoint.time_added = to_local_datetime(endpoint.time_added)
    name = endpoint.name
    rule_strings = [rule.rule for rule in get_rules(name)]
    return {
        'id': endpoint_id,
        'endpoint': name,
        'rules': ', '.join(rule_strings),
        'rule': endpoint,
        'url': get_url(name),
        'total_hits': count_requests(db_session, endpoint.id),
    }
def profiler(endpoint_id):
    """
    Render the paginated profiler-results page for one endpoint.

    :param endpoint_id: id of the endpoint whose profiled requests are shown
    :return: rendered 'fmd_dashboard/profiler.html' template
    """
    page, per_page, offset = get_page_args(page_parameter='page',
                                           per_page_parameter='per_page')
    with session_scope() as db_session:
        details = get_endpoint_details(db_session, endpoint_id)
        requests = get_profiled_requests(db_session, endpoint_id, offset, per_page)
        total = count_profiled_requests(db_session, endpoint_id)
        pagination = Pagination(
            page=page,
            per_page=per_page,
            total=total,
            format_number=True,
            css_framework='bootstrap4',
            format_total=True,
            record_name='profiled requests',
        )
        # dict with the request.id as a key, and the values is a list for
        # every stack_line.
        body = {}
        for profiled in requests:
            profiled.time_requested = to_local_datetime(profiled.time_requested)
            stack = profiled.stack_lines
            body[profiled.id] = [get_body(i, stack) for i in range(len(stack))]
        return render_template(
            'fmd_dashboard/profiler.html',
            details=details,
            requests=requests,
            pagination=pagination,
            title='Profiler results for {}'.format(details['endpoint']),
            body=body,
        )
def get_daterange_form(num_days=20):
    """
    Returns a SelectDateRangeForm with two dates:
    - end_date is today
    - start_date is the today - numdays

    If the submitted form validates, the user-provided dates are kept
    (swapped if they were given in the wrong order); otherwise the default
    range above is filled in.

    :param num_days: the date for the start_date
    :return: A SelectDateRangeForm object with the required logic
    """
    form = SelectDateRangeForm(request.form)
    if form.validate():
        start, end = form.start_date.data, form.end_date.data
        if start > end:
            # Normalize: make sure start_date precedes end_date.
            form.start_date.data, form.end_date.data = end, start
    else:
        today = to_local_datetime(datetime.datetime.utcnow()).date()
        form.end_date.data = today
        form.start_date.data = today - datetime.timedelta(days=num_days)
    return form
def get_profiler_table(session, endpoint_id, offset, per_page):
    """
    Return the profiled requests of an endpoint as a list of dicts, each
    carrying its serialized stack lines under 'stack_lines'.

    :param session: session for the database
    :param endpoint_id: endpoint to filter on
    :param offset: number of items that are skipped
    :param per_page: number of items that are returned (at most)
    """
    rows = get_profiled_requests(session, endpoint_id, offset, per_page)
    for position, request_row in enumerate(rows):
        request_row.time_requested = to_local_datetime(request_row.time_requested)
        entry = row2dict(request_row)
        # Serialize every stack line together with its code object.
        serialized_lines = []
        for line in request_row.stack_lines:
            line_dict = row2dict(line)
            line_dict['code'] = row2dict(line.code)
            serialized_lines.append(line_dict)
        entry['stack_lines'] = serialized_lines
        rows[position] = entry
    return rows
def get_outlier_table(session, endpoint_id, offset, per_page):
    """
    :param session: session for the database
    :param endpoint_id: id of the endpoint
    :param offset: number of items to be skipped
    :param per_page: maximum number of items to be returned
    :return: a list of length at most 'per_page' with data about each outlier
    """
    outliers = get_outliers_sorted(session, endpoint_id, offset, per_page)
    for position, outlier in enumerate(outliers):
        outlier.request.time_requested = to_local_datetime(
            outlier.request.time_requested)
        # Best-effort decode of the stored URL; on failure the value is
        # logged and left as-is (deliberately broad catch).
        try:
            outlier.request_url = outlier.request_url.decode('utf-8')
        except Exception as e:
            log(e)
        serialized_request = row2dict(outlier.request)
        outliers[position] = row2dict(outlier)
        outliers[position]['request'] = serialized_request
    return outliers
def test_profiler_table(dashboard_user, stack_line, request_1, endpoint, offset, per_page):
    """The profiler_table API must serialize the request and its stack lines."""
    url = 'dashboard/api/profiler_table/{0}/{1}/{2}'.format(
        endpoint.id, offset, per_page)
    response = dashboard_user.get(url)
    assert response.status_code == 200

    [data] = response.json
    # Scalar fields of the serialized request.
    expected = {
        'duration': str(request_1.duration),
        'endpoint_id': str(endpoint.id),
        'group_by': request_1.group_by,
        'id': str(request_1.id),
        'ip': request_1.ip,
        'status_code': str(request_1.status_code),
        'time_requested': str(to_local_datetime(request_1.time_requested)),
        'version_requested': request_1.version_requested,
    }
    for field, value in expected.items():
        assert data[field] == value

    # Exactly one stack line, carrying its code object.
    assert len(data['stack_lines']) == 1
    code = data['stack_lines'][0]['code']
    assert code['code'] == stack_line.code.code
    assert code['filename'] == stack_line.code.filename
    assert code['function_name'] == stack_line.code.function_name
    assert code['line_number'] == str(stack_line.code.line_number)
def get_endpoint_details(db_session, endpoint_id):
    """
    Returns details about an endpoint.

    :param db_session: session for the database
    :param endpoint_id: id of the endpoint
    :return dictionary
    """
    endpoint = get_endpoint_by_id(db_session, endpoint_id)
    endpoint.time_added = to_local_datetime(endpoint.time_added)
    # FIX: fetch the Flask rules once (the original called get_rules twice).
    flask_rules = get_rules(endpoint.name)
    # FIX: flat comprehension instead of the quadratic sum(list_of_lists, []).
    methods = [method for rule in flask_rules for method in rule.methods]
    return {
        'id': endpoint_id,
        'color': get_color(endpoint.name),
        # dict.fromkeys deduplicates while preserving first-seen order.
        'methods': list(dict.fromkeys(methods)),
        'endpoint': endpoint.name,
        'rules': [rule.rule for rule in flask_rules],
        'monitor-level': endpoint.monitor_level,
        'url': get_url(endpoint.name),
        'total_hits': count_requests(db_session, endpoint.id)
    }
def get_last_tested_times(db_session):
    """ Returns the last tested time of each of the endpoints. """
    rows = (
        db_session.query(TestEndpoint, func.max(TestEndpoint.time_added))
        .join(TestEndpoint.endpoint)
        .group_by(TestEndpoint.endpoint_id)
        .all()
    )
    # Each row is (TestEndpoint, latest time_added); expose name + local time.
    return [(test_endpoint.endpoint.name, to_local_datetime(latest))
            for test_endpoint, latest in rows]
def test_timezone():
    """Round-tripping through UTC and local time must be lossless."""
    moment = datetime.datetime.now()
    assert to_local_datetime(to_utc_datetime(moment)) == moment
    assert to_utc_datetime(to_local_datetime(moment)) == moment
def test_timezone_none():
    """Both converters must pass None through unchanged."""
    for convert in (to_local_datetime, to_utc_datetime):
        assert convert(None) is None
def test_timezone(self):
    """Round-tripping through UTC and local time must be lossless."""
    moment = datetime.datetime.now()
    self.assertEqual(to_local_datetime(to_utc_datetime(moment)), moment)
    self.assertEqual(to_utc_datetime(to_local_datetime(moment)), moment)
def test_timezone_none(self):
    """Both converters must map None to None."""
    for convert in (to_local_datetime, to_utc_datetime):
        self.assertEqual(convert(None), None)