Ejemplo n.º 1
0
def report_log_json(request, report_hash=None):
    """Return the log entries of a single puppet report as JSON.

    :param request: Django HttpRequest used to resolve the PuppetDB source.
    :param report_hash: PuppetDB report hash identifying the report.
    :return: HttpResponse with JSON ``{'agent_log': [...], 'report_hash': ...}``,
             or ``{'error': ...}`` when the hash is missing or the API call fails.
    """
    source_url, source_certs, source_verify = get_server(request)
    # Response payload; filled with either an error message or the log data.
    context = {}

    if report_hash is None:
        context['error'] = 'Report Hash not provided.'
        return HttpResponse(json.dumps(context), content_type="application/json")

    report_logs = puppetdb.api_get(
        api_url=source_url,
        cert=source_certs,
        verify=source_verify,
        path='/reports/' + report_hash + '/logs',
        api_version='v4',
    )
    # api_get signals failure with a dict containing an 'error' key;
    # pass that payload straight through to the client.
    if 'error' in report_logs:
        context = report_logs
        return HttpResponse(json.dumps(context), content_type="application/json")

    # Normalize each log entry's timestamp in place.
    for log in report_logs:
        # Parse... 2015-09-18T18:02:04.753163330+02:00
        # Puppetlabs... has a super long millisecond counter (9 digits!!!)
        # We need to trim those down...
        # Keep the first 26 chars (date/time + 6-digit microseconds) and
        # re-append the UTC offset with its ':' dropped so arrow can parse it.
        time = log['time'][0:26] + log['time'][-6:-3] + log['time'][-2:]
        time = arrow.get(time).to('UTC').datetime
        # Render in the session's local timezone via the Django date filter.
        log['time'] = filters.date(localtime(time), 'Y-m-d H:i:s')

    context['agent_log'] = report_logs
    context['report_hash'] = report_hash
    return HttpResponse(json.dumps(context), content_type="application/json")
Ejemplo n.º 2
0
def get_report(key, value, request, timespan='latest', environment=None):
    """Fetch PuppetDB events where ``key`` equals ``value``.

    :param key: event field to filter on; any value outside the allowed
                fields falls back to 'containing_class'.
    :param value: value the field must equal (interpolated into the query).
    :param request: Django HttpRequest used to resolve the PuppetDB source.
    :param timespan: 'latest' restricts to the latest reports; a 2-item
                     (start, end) timestamp pair restricts to that window;
                     any other value adds no time clause.
    :param environment: unused; kept for interface compatibility.
    :return: decoded response of the PuppetDB /events endpoint.
    """
    source_url, source_certs, source_verify = get_server(request)
    # Whitelist of queryable event fields; default anything else to
    # 'containing_class' (replaces the old `if ...: pass / else:` shape).
    allowed_keys = ('certname', 'resource_title', 'resource_type', 'containing_class')
    if key not in allowed_keys:
        key = 'containing_class'

    # Clause 3 always excludes deactivated nodes; clause 1 (time filter)
    # is filled in below, clause 2 is the field match.
    events_params = {
        'query':
            {
                'operator': 'and',
                2: '["=","' + key + '","' + value + '"]',
                3: '["in", "certname",["extract", "certname",["select_nodes",["null?","deactivated",true]]]]'
            },
    }
    if timespan == 'latest':
        events_params['query'][1] = '["=","latest_report?",true]'
    elif len(timespan) == 2:
        events_params['query'][1] = '["and",[">","timestamp","' + timespan[0] + '"],["<", "timestamp", "' + timespan[
            1] + '"]]'

    results = pdb_api_get(
        api_url=source_url,
        cert=source_certs,
        verify=source_verify,
        path='/events',
        api_version='v4',
        params=mk_puppetdb_query(events_params, request),
    )
    return results
Ejemplo n.º 3
0
def reports(request, certname=None):
    """Render the reports page for a node, or redirect to its latest report.

    :param request: Django HttpRequest; honors 'source', 'latest' and
                    'report_timestamp' GET parameters and a 'timezone' POST.
    :param certname: node whose reports should be shown.
    :return: rendered 'pano/reports.html', or a redirect (events page for
             the latest report, nodes page, or the POSTed return_url).
    """
    context = {'timezones': pytz.common_timezones,
               'SOURCES': AVAILABLE_SOURCES}
    # Switch the active PuppetDB source when requested via GET.
    if request.method == 'GET':
        if 'source' in request.GET:
            source = request.GET.get('source')
            set_server(request, source)
    # A POST only changes the session timezone, then bounces back.
    if request.method == 'POST':
        request.session['django_timezone'] = request.POST['timezone']
        return redirect(request.POST['return_url'])

    source_url, source_certs, source_verify = get_server(request)
    # ?latest=true: resolve the node's newest report and jump straight
    # to its events page instead of rendering the report list.
    if request.GET.get('latest', False):
        if request.GET.get('latest') == "true":
            request.session['report_page'] = 1
            # Newest report for this certname only (limit 1, start_time desc).
            latest_report_params = {
                'query':
                    {
                        1: '["=","certname","' + certname + '"]'
                    },
                'order_by':
                    {
                        'order_field':
                            {
                                'field': 'start_time',
                                'order': 'desc',
                            },
                    },
                'limit': 1,
            }
            latest_report = puppetdb.api_get(
                api_url=source_url,
                cert=source_certs,
                verify=source_verify,
                path='/reports',
                api_version='v4',
                params=puppetdb.mk_puppetdb_query(latest_report_params, request),
            )
            report_hash = ""
            # If latest reports do not exist, send to the nodes page
            # Should only occur if the user is trying to hax their way
            # into a node without having the correct permission
            if latest_report:
                # NOTE(review): report_env is only bound inside this loop;
                # assumes api_get returns a non-empty list of report dicts
                # here (not an error payload) — confirm.
                for report in latest_report:
                    report_env = report['environment']
                    report_hash = report['hash']
                # NOTE(review): request.GET.get('report_timestamp') may be
                # None, which would raise TypeError on concatenation —
                # confirm callers always pass it alongside latest=true.
                return redirect('/pano/events/' + report_hash + '?report_timestamp=' + request.GET.get(
                    'report_timestamp') + '&envname=' + report_env)
            else:
                return redirect('/pano/nodes/')

    # Reset pagination when the user navigates to a different node.
    if certname != request.session.get('last_shown_node', ''):
        request.session['last_shown_node'] = certname
        request.session['report_page'] = 1

    context['certname'] = certname
    context['node_facts'] = ','.join(NODES_DEFAULT_FACTS)

    return render(request, 'pano/reports.html', context)
Ejemplo n.º 4
0
def catalogue_json(request, certname=None):
    """Serve a node's catalogue — or its edges/resources listing — as JSON.

    :param request: Django HttpRequest; optional 'show' GET parameter
                    selects the 'edges' or 'resources' sub-collection.
    :param certname: node whose catalogue is requested.
    :return: HttpResponse with ``{"data": ...}`` JSON, or an error payload
             when certname is missing.
    """
    context = dict()
    if not certname:
        context["error"] = "Must specify certname."
        return HttpResponse(json.dumps(context), content_type="application/json")
    source_url, source_certs, source_verify = get_server(request)

    show = request.GET.get("show", None)
    # Each supported sub-collection maps to the field it is sorted by.
    sort_by = {"edges": "source_title", "resources": "title"}
    reports_params = {}
    if show in sort_by:
        reports_params = {"order_by": {"order_field": {"field": sort_by[show], "order": "asc"}}}
        path = "/catalogs/%s/%s" % (certname, show)
    else:
        # Unknown or absent 'show': return the whole catalogue.
        path = "/catalogs/%s" % certname

    reports_list = puppetdb.api_get(
        path=path, api_url=source_url, api_version="v4", params=puppetdb.mk_puppetdb_query(reports_params, request)
    )
    data = {"data": reports_list}
    return HttpResponse(json.dumps(data, indent=2), content_type="application/json")
Ejemplo n.º 5
0
def catalogue_compare_json(request, certname1=None, certname2=None):
    """Return a unified diff between the catalogues of two nodes.

    :param request: Django HttpRequest; optional 'show' GET parameter
                    selects 'edges' (default) or 'resources'.
    :param certname1: first node (the "from" side of the diff).
    :param certname2: second node (the "to" side of the diff).
    :return: HttpResponse containing the plain-text unified diff.
    """
    source_url, source_certs, source_verify = get_server(request)
    show = request.GET.get("show", "edges")
    # Bug fix: an unrecognized 'show' value previously left sort_field
    # unbound and raised NameError; fall back to the default 'edges' view.
    sort_fields = {"edges": "source_title", "resources": "title"}
    if show not in sort_fields:
        show = "edges"
    sort_field = sort_fields[show]
    certname1_params = {"order_by": {"order_field": {"field": sort_field, "order": "asc"}}}
    certname2_params = {"order_by": {"order_field": {"field": sort_field, "order": "asc"}}}
    certname1_data = puppetdb.api_get(
        path="/catalogs/%s/%s" % (certname1, show),
        api_url=source_url,
        api_version="v4",
        params=puppetdb.mk_puppetdb_query(certname1_params, request),
    )
    certname1_data = json.dumps(certname1_data, indent=2)
    certname2_data = puppetdb.api_get(
        path="/catalogs/%s/%s" % (certname2, show),
        api_url=source_url,
        api_version="v4",
        params=puppetdb.mk_puppetdb_query(certname2_params, request),
    )
    certname2_data = json.dumps(certname2_data, indent=2)

    # Diff the two pretty-printed JSON documents line by line.
    from_split_lines = certname1_data.split("\n")
    to_split_lines = certname2_data.split("\n")
    diff = difflib.unified_diff(from_split_lines, to_split_lines)
    diff = "\n".join(list(diff))
    return HttpResponse(diff)
Ejemplo n.º 6
0
def search_nodes_json(request):
    """Search nodes whose certname matches a regex and return them as JSON.

    :param request: Django HttpRequest carrying a 'search' GET parameter
                    (a regex fragment matched against certname).
    :return: HttpResponse with the JSON node list, or an error payload
             when no search term was supplied.
    """
    source_url, source_certs, source_verify = get_server(request)
    search = None
    if request.method == 'GET':
        if 'search' in request.GET:
            search = request.GET.get('search')
    # Bug fix: 'search' was previously unbound (NameError) when the
    # parameter was missing or the request was not a GET.
    if search is None:
        return HttpResponse(json.dumps({'error': 'Must specify a search query.'}),
                            content_type="application/json")

    # Regex match on certname, reverse-alphabetical order.
    nodes_params = {
        'query':
            {
                1: '["~","certname","' + search + '"]'
            },
        'order_by':
            {
                'order_field':
                    {
                        'field': 'certname',
                        'order': 'desc',
                    },
            },
        #'limit': 25,
    }
    nodes_list = puppetdb.api_get(
        api_url=source_url,
        cert=source_certs,
        verify=source_verify,
        path='/nodes',
        api_version='v4',
        params=puppetdb.mk_puppetdb_query(
            nodes_params, request),
    )
    return HttpResponse(json.dumps(nodes_list), content_type="application/json")
Ejemplo n.º 7
0
def get_events_summary(request, timespan='latest'):
    """Summarize puppet events for the latest reports or a time window.

    :param request: Django HttpRequest used to resolve the PuppetDB source.
    :param timespan: 'latest' for latest-report events only, or a
                     two-element (start, end) timestamp pair.
    :return: summary dict produced by summary_of_events().
    """
    # Bug fix: events_params was only defined when timespan == 'latest',
    # so any other timespan raised NameError. Build the base query first
    # (clause 2 excludes deactivated nodes), then add the time clause.
    events_params = {
        'query':
            {
                'operator': 'and',
                2: '["in","certname",["extract","certname",["select_nodes",["null?","deactivated",true]]]]',
            },
    }
    if timespan == 'latest':
        events_params['query'][1] = '["=","latest_report?",true]'
    elif len(timespan) == 2:
        events_params['query'][1] = '["and",[">","timestamp","' + timespan[0] + '"],["<", "timestamp", "' + timespan[
            1] + '"]]'

    source_url, source_certs, source_verify = get_server(request)
    events = pdb_api_get(
        api_url=source_url,
        cert=source_certs,
        verify=source_verify,
        path='/events',
        api_version='v4',
        params=mk_puppetdb_query(events_params, request))
    summary = summary_of_events(events)
    return summary
Ejemplo n.º 8
0
def get_events_summary(request, timespan='latest', environment=None):
    """Summarize puppet events, either for the latest reports or within a
    (start, end) timestamp window.

    :param request: Django HttpRequest used to resolve the PuppetDB source.
    :param timespan: 'latest', or a two-element (start, end) pair.
    :param environment: unused; kept for interface compatibility.
    :return: summary dict produced by summary_of_events().
    """
    # Clause 2 always restricts results to non-deactivated nodes.
    active_nodes_clause = '["in","certname",["extract","certname",["select_nodes",["null?","deactivated",true]]]]'
    events_params = {
        'query': {
            'operator': 'and',
            2: active_nodes_clause,
        },
    }
    # Clause 1 carries the time filter, when one applies.
    if timespan == 'latest':
        events_params['query'][1] = '["=","latest_report?",true]'
    elif len(timespan) == 2:
        start, end = timespan[0], timespan[1]
        events_params['query'][1] = ('["and",[">","timestamp","' + start +
                                     '"],["<", "timestamp", "' + end + '"]]')

    source_url, source_certs, source_verify = get_server(request)
    raw_events = pdb_api_get(
        api_url=source_url,
        cert=source_certs,
        verify=source_verify,
        path='/events',
        api_version='v4',
        params=mk_puppetdb_query(events_params, request))
    return summary_of_events(raw_events)
Ejemplo n.º 9
0
def reports_search_json(request):
    """Search a node's reports by hash prefix and return matches as JSON.

    :param request: Django HttpRequest carrying 'certname' and 'search'
                    (a report-hash prefix) GET parameters.
    :return: HttpResponse with the JSON report list, newest first, or an
             error payload when either parameter is missing.
    """
    context = dict()
    # Bug fix: both variables were previously unbound (NameError) when a
    # parameter was missing or the request was not a GET.
    search = None
    certname = None
    if request.method == 'GET':
        search = request.GET.get('search')
        certname = request.GET.get('certname')
    if not certname or not search:
        context['error'] = 'Must specify both certname and search query.'
        return HttpResponse(json.dumps(context), content_type="application/json")
    source_url, source_certs, source_verify = get_server(request)
    # Reports for this node whose hash starts with the search term,
    # ordered newest first.
    reports_params = {
        'query':
            {
                'operator': 'and',
                1: '["=","certname","' + certname + '"]',
                2: '["~","hash","^' + search + '"]'
            },
        'order_by':
            {
                'order_field':
                    {
                        'field': 'start_time',
                        'order': 'desc',
                    },
            }
    }

    reports_list = puppetdb.api_get(
        path='/reports',
        api_url=source_url,
        api_version='v4',
        params=puppetdb.mk_puppetdb_query(reports_params, request),
    )
    return HttpResponse(json.dumps(reports_list), content_type="application/json")
Ejemplo n.º 10
0
def dashboard_json(request):
    """Build the dashboard overview payload (status counts + node table) as JSON.

    Runs several PuppetDB queries in parallel, classifies every active node
    into run-status buckets, and returns the node list for the view chosen
    by the 'show' GET parameter ('recent', 'failed', 'unreported',
    'changed', 'mismatch' or 'pending').

    :param request: Django HttpRequest; may carry 'source' and 'show' GET
                    parameters or a 'timezone' POST parameter.
    :return: HttpResponse with the JSON dashboard context, or a redirect
             after a timezone-change POST.
    """
    context = {}
    # Switch the active PuppetDB source when requested via GET.
    if request.method == 'GET':
        if 'source' in request.GET:
            source = request.GET.get('source')
            set_server(request, source)
    # A POST only changes the session timezone, then bounces back.
    if request.method == 'POST':
        request.session['django_timezone'] = request.POST['timezone']
        return redirect(request.POST['return_url'])

    source_url, source_certs, source_verify = get_server(request)

    # Expected puppet run interval, used by dictstatus() classification.
    puppet_run_time = get_server(request, type='run_time')
    dashboard_show = request.GET.get('show', 'recent')
    # Event counts per certname, restricted to each active node's latest report.
    events_params = {
        'query': {
            1:
            '["and",["=","latest_report?",true],["in", "certname",["extract", "certname",["select_nodes",["null?","deactivated",true]]]]]'
        },
        'summarize_by': 'certname',
    }
    # Latest report of every active (non-deactivated) node.
    reports_params = {
        'query': {
            1:
            '["and",["=","latest_report?",true],["in", "certname",["extract", "certname",["select_nodes",["null?","deactivated",true]]]]]'
        }
    }
    # The 25 most recently reporting nodes, for the 'recent' table view.
    nodes_params = {
        'limit': 25,
        'order_by': {
            'order_field': {
                'field': 'report_timestamp',
                'order': 'desc',
            },
            'query_field': {
                'field': 'certname'
            },
        },
    }

    # Job definitions executed concurrently by run_puppetdb_jobs();
    # results are keyed by each job's 'id'.
    jobs = {
        # mbeans metric: total number of resources managed by puppet.
        'tot_resource': {
            'url':
            source_url,
            'certs':
            source_certs,
            'verify':
            source_verify,
            'id':
            'tot_resource',
            'path':
            'mbeans/puppetlabs.puppetdb.query.population:type=default,name=num-resources',
        },
        # mbeans metric: average resources per node.
        'avg_resource': {
            'url':
            source_url,
            'certs':
            source_certs,
            'verify':
            source_verify,
            'id':
            'avg_resource',
            'path':
            'mbeans/puppetlabs.puppetdb.query.population:type=default,name=avg-resources-per-node',
        },
        # Every node known to PuppetDB (no query filter).
        'all_nodes': {
            'url': source_url,
            'certs': source_certs,
            'verify': source_verify,
            'api_version': 'v4',
            'id': 'all_nodes',
            'path': '/nodes',
            'request': request
        },
        'events': {
            'url': source_url,
            'certs': source_certs,
            'verify': source_verify,
            'id': 'event_counts',
            'path': '/event-counts',
            'api_version': 'v4',
            'params': events_params,
            'request': request
        },
        'reports': {
            'url': source_url,
            'certs': source_certs,
            'verify': source_verify,
            'api_version': 'v4',
            'id': 'reports',
            'path': '/reports',
            'params': reports_params,
            'request': request
        },
        'nodes': {
            'url': source_url,
            'certs': source_certs,
            'verify': source_verify,
            'api_version': 'v4',
            'id': 'nodes',
            'path': '/nodes',
            'params': nodes_params,
            'request': request
        },
    }
    puppetdb_results = run_puppetdb_jobs(jobs)

    # Assign vars from the completed jobs
    # Number of results from all_nodes is our population.
    puppet_population = len(puppetdb_results['all_nodes'])
    # Total resources managed by puppet metric
    total_resources = puppetdb_results['tot_resource']
    # Average resource per node metric
    avg_resource_node = puppetdb_results['avg_resource']
    # Information about all active nodes in puppet
    all_nodes_list = puppetdb_results['all_nodes']
    # All available events for the latest puppet reports
    event_list = puppetdb_results['event_counts']
    # Re-key event counts by certname for O(1) lookup in dictstatus().
    event_dict = {item['subject']['title']: item for item in event_list}
    # All of the latest reports
    reports_list = puppetdb_results['reports']
    reports_dict = {item['certname']: item for item in reports_list}
    # 25 Nodes
    node_list = puppetdb_results['nodes']

    # Classify every node into run-status buckets.
    failed_list, changed_list, unreported_list, mismatch_list, pending_list = dictstatus(
        all_nodes_list,
        reports_dict,
        event_dict,
        sort=True,
        sortby='latestReport',
        get_status='notall',
        puppet_run_time=puppet_run_time)

    # De-duplicate across buckets so each node is counted only once:
    # unreported trumps pending/changed/failed; failed trumps changed.
    pending_list = [x for x in pending_list if x not in unreported_list]
    changed_list = [
        x for x in changed_list if x not in unreported_list
        and x not in failed_list and x not in pending_list
    ]
    failed_list = [x for x in failed_list if x not in unreported_list]
    unreported_list = [x for x in unreported_list if x not in failed_list]

    node_unreported_count = len(unreported_list)
    node_fail_count = len(failed_list)
    node_change_count = len(changed_list)
    node_off_timestamps_count = len(mismatch_list)
    node_pending_count = len(pending_list)

    # Pick the node table matching the requested view; 'recent' (and any
    # unknown value) shows the 25 most recently reporting nodes.
    if dashboard_show == 'recent':
        merged_nodes_list = dictstatus(node_list,
                                       reports_dict,
                                       event_dict,
                                       sort=False,
                                       get_status="all",
                                       puppet_run_time=puppet_run_time)
    elif dashboard_show == 'failed':
        merged_nodes_list = failed_list
    elif dashboard_show == 'unreported':
        merged_nodes_list = unreported_list
    elif dashboard_show == 'changed':
        merged_nodes_list = changed_list
    elif dashboard_show == 'mismatch':
        merged_nodes_list = mismatch_list
    elif dashboard_show == 'pending':
        merged_nodes_list = pending_list
    else:
        merged_nodes_list = dictstatus(node_list,
                                       reports_dict,
                                       event_dict,
                                       sort=False,
                                       get_status="all",
                                       puppet_run_time=puppet_run_time)

    context['node_list'] = merged_nodes_list
    context['selected_view'] = dashboard_show
    context['population'] = puppet_population
    # mbeans metrics expose their reading under the 'Value' key.
    context['total_resource'] = total_resources['Value']
    context['avg_resource'] = "{:.2f}".format(avg_resource_node['Value'])
    context['failed_nodes'] = node_fail_count
    context['changed_nodes'] = node_change_count
    context['unreported_nodes'] = node_unreported_count
    context['mismatching_timestamps'] = node_off_timestamps_count
    context['pending_nodes'] = node_pending_count

    return HttpResponse(json.dumps(context), content_type="application/json")
Ejemplo n.º 11
0
def dashboard_nodes_json(request):
    """Return the dashboard's node table as JSON for the selected view.

    Like dashboard_json() but without the population/resource metrics:
    classifies active nodes into run-status buckets and returns only the
    node list for the view chosen by the 'show' GET parameter.

    :param request: Django HttpRequest; may carry 'source' and 'show' GET
                    parameters or a 'timezone' POST parameter.
    :return: HttpResponse with JSON {'node_list': ..., 'selected_view': ...},
             or a redirect after a timezone-change POST.
    """
    context = {}
    # Switch the active PuppetDB source when requested via GET.
    if request.method == 'GET':
        if 'source' in request.GET:
            source = request.GET.get('source')
            set_server(request, source)
    # A POST only changes the session timezone, then bounces back.
    if request.method == 'POST':
        request.session['django_timezone'] = request.POST['timezone']
        return redirect(request.POST['return_url'])

    source_url, source_certs, source_verify = get_server(request)

    # Expected puppet run interval, used by dictstatus() classification.
    puppet_run_time = get_server(request, type='run_time')

    # Dashboard to show nodes of "recent, failed, unreported or changed"
    dashboard_show = request.GET.get('show', 'recent')
    # Event counts per certname, restricted to each active node's latest report.
    events_params = {
        'query': {
            1:
            '["and",["=","latest_report?",true],["in", "certname",["extract", "certname",["select_nodes",["null?","deactivated",true]]]]]'
        },
        'summarize_by': 'certname',
    }
    # NOTE(review): defined but never referenced below — confirm whether
    # an 'all_nodes' params entry was intended in the jobs dict.
    all_nodes_params = {
        'query': {
            1:
            '["and",["=","latest_report?",true],["in", "certname",["extract", "certname",["select_nodes",["null?","deactivated",true]]]]]'
        },
    }
    # Latest report of every active (non-deactivated) node.
    reports_params = {
        'query': {
            1:
            '["and",["=","latest_report?",true],["in", "certname",["extract", "certname",["select_nodes",["null?","deactivated",true]]]]]'
        }
    }
    # The 25 most recently reporting nodes, for the 'recent' table view.
    nodes_params = {
        'limit': 25,
        'order_by': {
            'order_field': {
                'field': 'report_timestamp',
                'order': 'desc',
            },
            'query_field': {
                'field': 'certname'
            },
        },
    }

    # Job definitions executed concurrently by run_puppetdb_jobs();
    # results are keyed by each job's 'id'.
    jobs = {
        'all_nodes': {
            'url': source_url,
            'certs': source_certs,
            'verify': source_verify,
            'api_version': 'v4',
            'id': 'all_nodes',
            'path': '/nodes',
            'request': request
        },
        'events': {
            'url': source_url,
            'certs': source_certs,
            'verify': source_verify,
            'id': 'event_counts',
            # NOTE(review): path lacks the leading '/' used elsewhere
            # ('/event-counts') — confirm the API helper normalizes it.
            'path': 'event-counts',
            'api_version': 'v4',
            'params': events_params,
            'request': request
        },
        'nodes': {
            'url': source_url,
            'certs': source_certs,
            'verify': source_verify,
            'api_version': 'v4',
            'id': 'nodes',
            'path': '/nodes',
            'params': nodes_params,
            'request': request
        },
        'reports': {
            'url': source_url,
            'certs': source_certs,
            'verify': source_verify,
            'api_version': 'v4',
            'id': 'reports',
            'path': '/reports',
            'params': reports_params,
            'request': request
        },
    }

    puppetdb_results = run_puppetdb_jobs(jobs)
    # Information about all active nodes in puppet
    all_nodes_list = puppetdb_results['all_nodes']
    # All available events for the latest puppet reports
    event_list = puppetdb_results['event_counts']
    # Re-key event counts by certname for O(1) lookup in dictstatus().
    event_dict = {item['subject']['title']: item for item in event_list}
    # All of the latest reports
    reports_list = puppetdb_results['reports']
    reports_dict = {item['certname']: item for item in reports_list}
    # 25 Nodes
    node_list = puppetdb_results['nodes']

    # Classify every node into run-status buckets.
    failed_list, changed_list, unreported_list, mismatch_list, pending_list = dictstatus(
        all_nodes_list,
        reports_dict,
        event_dict,
        sort=True,
        sortby='latestReport',
        get_status='notall',
        puppet_run_time=puppet_run_time)
    # De-duplicate across buckets so each node appears in only one:
    # unreported trumps pending/changed/failed; failed trumps changed.
    pending_list = [x for x in pending_list if x not in unreported_list]
    changed_list = [
        x for x in changed_list if x not in unreported_list
        and x not in failed_list and x not in pending_list
    ]
    failed_list = [x for x in failed_list if x not in unreported_list]
    unreported_list = [x for x in unreported_list if x not in failed_list]

    # Pick the node table matching the requested view; 'recent' (and any
    # unknown value) shows the 25 most recently reporting nodes.
    if dashboard_show == 'recent':
        merged_nodes_list = dictstatus(node_list,
                                       reports_dict,
                                       event_dict,
                                       sort=False,
                                       get_status="all",
                                       puppet_run_time=puppet_run_time)
    elif dashboard_show == 'failed':
        merged_nodes_list = failed_list
    elif dashboard_show == 'unreported':
        merged_nodes_list = unreported_list
    elif dashboard_show == 'changed':
        merged_nodes_list = changed_list
    elif dashboard_show == 'mismatch':
        merged_nodes_list = mismatch_list
    elif dashboard_show == 'pending':
        merged_nodes_list = pending_list
    else:
        merged_nodes_list = dictstatus(node_list,
                                       reports_dict,
                                       event_dict,
                                       sort=False,
                                       get_status="all",
                                       puppet_run_time=puppet_run_time)

    context['node_list'] = merged_nodes_list
    context['selected_view'] = dashboard_show

    return HttpResponse(json.dumps(context), content_type="application/json")
Ejemplo n.º 12
0
def nodes_json(request):
    if request.method == 'GET':
        if 'source' in request.GET:
            source = request.GET.get('source')
            set_server(request, source)
    if request.method == 'POST':
        request.session['django_timezone'] = request.POST['timezone']
        return redirect(request.POST['return_url'])

    source_url, source_certs, source_verify = get_server(request)
    puppet_run_time = get_server(request, type='run_time')
    valid_sort_fields = ('certname', 'catalog_timestamp', 'report_timestamp',
                         'facts_timestamp', 'successes', 'noops', 'failures',
                         'skips')
    try:
        # If user requested to download csv formatted file. Default value is False
        dl_csv = request.GET.get('dl_csv', False)
        if dl_csv == 'true':
            dl_csv = True
        else:
            dl_csv = False
        # Add limits to session
        if request.GET.get('limits', False):
            if request.session['limits'] != int(request.GET.get('limits', 50)):
                request.session['limits'] = int(request.GET.get('limits', 50))
            if request.session['limits'] <= 0:
                request.session['limits'] = 50
        else:
            if 'limits' not in request.session:
                request.session['limits'] = 50

        # Cur Page Number
        if request.GET.get('page', False):
            if request.session['page'] != int(request.GET.get('page', 1)):
                request.session['page'] = int(request.GET.get('page', 1))
            if request.session['page'] <= 0:
                request.session['page'] = 1
        else:
            if 'page' not in request.session:
                request.session['page'] = 1

        # Cur sort field
        if request.GET.get('sortfield', False):
            if request.session['sortfield'] != request.GET.get('sortfield'):
                request.session['sortfield'] = request.GET.get('sortfield')
            if request.session['sortfield'] not in valid_sort_fields:
                request.session['sortfield'] = 'report_timestamp'
        else:
            if 'sortfield' not in request.session:
                request.session['sortfield'] = 'report_timestamp'

        # Cur sort order
        if request.GET.get('sortfieldby', False):
            avail_sortorder = ['asc', 'desc']
            if request.session['sortfieldby'] != request.GET.get(
                    'sortfieldby'):
                request.session['sortfieldby'] = request.GET.get('sortfieldby')
            if request.session['sortfieldby'] not in avail_sortorder:
                request.session['sortfieldby'] = 'desc'
        else:
            if 'sortfieldby' not in request.session:
                request.session['sortfieldby'] = 'desc'
        # Search parameters takes a valid puppetdb query string
        if request.GET.get('search', False):
            if 'search' in request.session and (request.session['search']
                                                == request.GET.get('search')):
                pass
            else:
                if request.GET.get('search') == 'clear_rules':
                    request.session['sortfield'] = 'report_timestamp'
                    request.session['sortfieldby'] = 'desc'
                    request.session['page'] = 1
                    request.session['search'] = None
                else:
                    request.session['page'] = 1
                    request.session['search'] = request.GET.get('search')
        else:
            if 'search' not in request.session:
                request.session['sortfield'] = 'report_timestamp'
                request.session['sortfieldby'] = 'desc'
                request.session['page'] = 1
                request.session['search'] = None

        # Set offset
        request.session['offset'] = (
            request.session['limits'] *
            request.session['page']) - request.session['limits']
    except:
        return HttpResponseBadRequest('Oh no! Your filters were invalid.')

    # Valid sort field that the user can search agnaist.
    sort_field = request.session['sortfield']
    sort_field_order = request.session['sortfieldby']
    page_num = request.session['page']

    if request.session['search'] is not None:
        node_params = {
            'query': {
                1: request.session['search']
            },
        }
    else:
        node_params = {
            'query': {},
        }

    nodes_sort_fields = [
        'certname', 'catalog_timestamp', 'report_timestamp', 'facts_timestamp'
    ]
    if sort_field in nodes_sort_fields:
        node_params['order_by'] = {
            'order_field': {
                'field': sort_field,
                'order': sort_field_order,
            },
        }
        if dl_csv is False:
            node_params['limit'] = request.session['limits']
            node_params['offset'] = request.session['offset']
    node_params['include_total'] = 'true'

    node_sort_fields = [
        'certname', 'catalog_timestamp', 'report_timestamp', 'facts_timestamp'
    ]
    try:
        node_list, node_headers = puppetdb.api_get(
            api_url=source_url,
            cert=source_certs,
            verify=source_verify,
            path='/nodes',
            api_version='v4',
            params=puppetdb.mk_puppetdb_query(node_params, request),
        )
    except:
        node_list = []
        node_headers = dict()
        node_headers['X-Records'] = 0

    status_sort_fields = ['successes', 'failures', 'skips', 'noops']
    # Create a filter part to limit the following API requests to data related to the node_list.
    # Skipt the filter completely if a large number of nodes are shown as the query tends to fail.
    node_filter = ''
    if len(node_list) <= 100 and sort_field not in status_sort_fields:
        node_filter = ', ["or"'
        for n in node_list:
            node_filter += ',["=","certname","%s"]' % n['certname']
        node_filter += ']'

    # Work out the number of pages from the xrecords response
    # return fields that you can sort by
    # for each node in the node_list, find out if the latest run has any failures
    # v3/event-counts --data-urlencode query='["=","latest-report?",true]'
    # --data-urlencode summarize-by='certname'
    report_params = {
        'query': {
            1:
            '["and" %s, ["=","latest_report?",true],["in", "certname",["extract", "certname",["select_nodes",["null?","deactivated",true]]]]]'
            % node_filter,
        },
        'summarize_by': 'certname',
    }

    if sort_field in status_sort_fields:
        report_params['order_by'] = {
            'order_field': {
                'field': sort_field,
                'order': sort_field_order,
            }
        }
        report_params['include_total'] = 'true'

        report_list, report_headers = puppetdb.api_get(
            api_url=source_url,
            cert=source_certs,
            verify=source_verify,
            path='/event-counts',
            params=puppetdb.mk_puppetdb_query(report_params, request),
            api_version='v4',
        )
    else:
        report_list = puppetdb.api_get(
            api_url=source_url,
            cert=source_certs,
            verify=source_verify,
            path='event-counts',
            params=puppetdb.mk_puppetdb_query(report_params, request),
            api_version='v4',
        )
    # number of results not depending on sort field.
    xrecords = node_headers['X-Records']
    total_results = xrecords

    num_pages_wdec = float(xrecords) / request.session['limits']
    num_pages_wodec = float("{:.0f}".format(num_pages_wdec))
    if num_pages_wdec > num_pages_wodec:
        num_pages = num_pages_wodec + 1
    else:
        num_pages = num_pages_wodec

    # Converts lists of dicts to dicts.
    report_dict = {item['subject']['title']: item
                   for item in report_list}  # /events-count
    if sort_field_order == 'desc':
        rows = dictstatus(node_list,
                          None,
                          report_dict,
                          sortby=sort_field,
                          asc=True,
                          sort=False,
                          puppet_run_time=puppet_run_time,
                          format_time=False)
        sort_field_order_opposite = 'asc'
    elif sort_field_order == 'asc':
        rows = dictstatus(node_list,
                          None,
                          report_dict,
                          sortby=sort_field,
                          asc=False,
                          sort=False,
                          puppet_run_time=puppet_run_time,
                          format_time=False)
        sort_field_order_opposite = 'desc'

    if dl_csv is True:
        if rows is []:
            pass
        else:
            # Generate a sequence of rows. The range is based on the maximum number of
            # rows that can be handled by a single sheet in most spreadsheet
            # applications.
            include_facts = request.GET.get('include_facts', False)
            csv_headers = [
                'Certname', 'Latest Catalog', 'Latest Report', 'Latest Facts',
                'Success', 'Noop', 'Failure', 'Skipped', 'Run Status'
            ]
            if include_facts is not False:
                merged_list_facts = []
                facts = {}
                for fact in include_facts.split(','):
                    # Sanitize the fact input from the user
                    fact = fact.strip()
                    # Add the fact name to the headers list
                    csv_headers.append(fact)

                    # build the params for each fact.
                    facts_params = {
                        'query': {
                            1: '["=","name","' + fact + '"]'
                        },
                    }
                    fact_list = puppetdb.api_get(
                        api_url=source_url,
                        cert=source_certs,
                        verify=source_verify,
                        path='facts',
                        params=puppetdb.mk_puppetdb_query(facts_params),
                        api_version='v4',
                    )
                    # Populate the facts dict with the facts we have retrieved
                    # Convert the fact list into a fact dict!
                    facts[fact] = {
                        item['certname']: item
                        for item in fact_list
                    }

                i = 1
                jobs = {}
                # Add ID to each job so that it can be assembled in
                # the same order after we recieve the job results
                # We do this via jobs so that we can get faster results.
                for node in rows:
                    jobs[i] = {
                        'id': i,
                        'include_facts': include_facts.split(','),
                        'node': node,
                        'facts': facts,
                    }
                    i += 1

                csv_results = generate_csv(jobs)
                rows = []
                i = 1
                # with the job results we can now recreate merged_list
                # in the order we sent them.
                while i <= len(csv_results):
                    rows.append(csv_results[i])
                    i += 1
            # Insert the csv header to the top of the list.
            rows.insert(0, csv_headers)
            pseudo_buffer = Echo()
            writer = csv.writer(pseudo_buffer)
            response = StreamingHttpResponse(
                (writer.writerow(row) for row in rows),
                content_type="text/csv")
            response[
                'Content-Disposition'] = 'attachment; filename="puppetdata-%s.csv"' % (
                    datetime.datetime.now())
            return response

    if sort_field in status_sort_fields:
        rows = rows[request.session['offset']:(request.session['limits'] +
                                               request.session['offset'])]
    """
    c_r_s* = current request sort
    c_r_* = current req
    r_s* = requests available
    """
    context = {
        'nodeList': rows,
        'total_nodes': total_results,
        'c_r_page': page_num,
        'c_r_limit': request.session['limits'],
        'r_sfield': valid_sort_fields,
        'c_r_sfield': sort_field,
        'r_sfieldby': ['asc', 'desc'],
        'c_r_sfieldby': sort_field_order,
        'c_r_sfieldby_o': sort_field_order_opposite,
        'tot_pages': '{0:g}'.format(num_pages),
    }
    return HttpResponse(json.dumps(context), content_type="application/json")
Ejemplo n.º 13
0
def detailed_events(request, hashid=None):
    """Render a detailed view of all events belonging to one report.

    Fetches every event tied to the report identified by ``hashid`` from
    PuppetDB, derives an approximate execution duration for each event
    (gap between consecutive event timestamps; the final event is measured
    against the report's ``run_end_time``) and exposes the ten
    longest-running events to the template.
    """
    context = {
        'timezones': pytz.common_timezones,
        'SOURCES': AVAILABLE_SOURCES
    }
    if request.method == 'GET' and 'source' in request.GET:
        set_server(request, request.GET.get('source'))
    if request.method == 'POST':
        # Timezone switcher posts back here; store it and return the user.
        request.session['django_timezone'] = request.POST['timezone']
        return redirect(request.POST['return_url'])

    source_url, source_certs, source_verify = get_server(request)
    report_timestamp = request.GET.get('report_timestamp')
    events_params = {
        'query': {
            1: '["=","report","' + hashid + '"]'
        },
        'order_by': {
            'order_field': {
                'field': 'timestamp',
                'order': 'asc',
            },
            'query_field': {
                'field': 'certname'
            },
        },
    }
    events_list = puppetdb.api_get(
        api_url=source_url,
        cert=source_certs,
        verify=source_verify,
        path='/events',
        api_version='v4',
        params=puppetdb.mk_puppetdb_query(events_params),
    )
    environment = ''
    certname = ''
    durations = []
    top_events = None

    if events_list:
        head = events_list[0]
        environment = head['environment']
        certname = head['certname']
        prev_time = None
        prev_title = None
        run_end = None
        for event in events_list:
            title = event['resource_title']
            started = json_to_datetime(event['timestamp'])
            if prev_time is None and prev_title is None:
                # First event: just remember it plus the report end time.
                prev_time = started
                prev_title = title
                run_end = json_to_datetime(event['run_end_time'])
                continue
            # Duration of the previous event is the gap until this one began.
            durations.append((prev_title, (started - prev_time).total_seconds()))
            prev_time = started
            prev_title = title
        # The last event runs until the report's end timestamp.
        durations.append([prev_title, (prev_time - run_end).total_seconds()])
        # Keep only the ten slowest events, slowest first.
        top_events = sorted(durations,
                            reverse=True,
                            key=lambda entry: entry[1])[:10]
    else:
        events_list = False
    context['certname'] = certname
    context['report_timestamp'] = report_timestamp
    context['hashid'] = hashid
    context['events_list'] = events_list
    context['event_durations'] = top_events
    context['environment'] = environment

    return render(request, 'pano/detailed_events.html', context)
Ejemplo n.º 14
0
def dashboard_json(request):
    """Return the dashboard overview as JSON.

    Runs a batch of PuppetDB jobs (population metrics, latest reports,
    event counts, newest 25 nodes), classifies active nodes into
    failed / changed / unreported / mismatching / pending buckets and
    returns the node list selected by the ``show`` GET parameter
    together with the bucket counts.
    """
    context = {}
    if request.method == 'GET' and 'source' in request.GET:
        set_server(request, request.GET.get('source'))
    if request.method == 'POST':
        # Timezone switcher posts back here; store it and return the user.
        request.session['django_timezone'] = request.POST['timezone']
        return redirect(request.POST['return_url'])

    source_url, source_certs, source_verify = get_server(request)
    puppet_run_time = get_server(request, type='run_time')
    dashboard_show = request.GET.get('show', 'recent')

    # Query fragment shared by the event-count and report jobs:
    # latest report only, active (non-deactivated) nodes only.
    latest_active_query = ('["and",["=","latest_report?",true],'
                           '["in", "certname",["extract", "certname",'
                           '["select_nodes",["null?","deactivated",true]]]]]')
    events_params = {
        'query': {1: latest_active_query},
        'summarize_by': 'certname',
    }
    reports_params = {
        'query': {1: latest_active_query}
    }
    nodes_params = {
        'limit': 25,
        'order_by': {
            'order_field': {
                'field': 'report_timestamp',
                'order': 'desc',
            },
            'query_field': {'field': 'certname'},
        },
    }

    def _job(job_id, path, **extra):
        # Shared connection settings for every PuppetDB job.
        job = {
            'url': source_url,
            'certs': source_certs,
            'verify': source_verify,
            'id': job_id,
            'path': path,
        }
        job.update(extra)
        return job

    jobs = {
        'tot_resource': _job(
            'tot_resource',
            'mbeans/puppetlabs.puppetdb.query.population:type=default,name=num-resources'),
        'avg_resource': _job(
            'avg_resource',
            'mbeans/puppetlabs.puppetdb.query.population:type=default,name=avg-resources-per-node'),
        'all_nodes': _job('all_nodes', '/nodes',
                          api_version='v4', request=request),
        'events': _job('event_counts', '/event-counts',
                       api_version='v4', params=events_params, request=request),
        'reports': _job('reports', '/reports',
                        api_version='v4', params=reports_params, request=request),
        'nodes': _job('nodes', '/nodes',
                      api_version='v4', params=nodes_params, request=request),
    }
    puppetdb_results = run_puppetdb_jobs(jobs)

    # Information about all active nodes; its length is our population.
    all_nodes_list = puppetdb_results['all_nodes']
    puppet_population = len(all_nodes_list)
    # Resource metrics from the PuppetDB mbeans endpoint.
    total_resources = puppetdb_results['tot_resource']
    avg_resource_node = puppetdb_results['avg_resource']
    # Index event counts by node title and reports by certname.
    event_dict = {item['subject']['title']: item
                  for item in puppetdb_results['event_counts']}
    reports_dict = {item['certname']: item
                    for item in puppetdb_results['reports']}
    # The 25 most recently reporting nodes.
    node_list = puppetdb_results['nodes']

    (failed_list, changed_list, unreported_list,
     mismatch_list, pending_list) = dictstatus(all_nodes_list,
                                               reports_dict,
                                               event_dict,
                                               sort=True,
                                               sortby='latestReport',
                                               get_status='notall',
                                               puppet_run_time=puppet_run_time)

    # De-duplicate the buckets so each node lands in a single list.
    pending_list = [x for x in pending_list if x not in unreported_list]
    changed_list = [x for x in changed_list if
                    x not in unreported_list and x not in failed_list and x not in pending_list]
    failed_list = [x for x in failed_list if x not in unreported_list]
    unreported_list = [x for x in unreported_list if x not in failed_list]

    node_unreported_count = len(unreported_list)
    node_fail_count = len(failed_list)
    node_change_count = len(changed_list)
    node_off_timestamps_count = len(mismatch_list)
    node_pending_count = len(pending_list)

    shortlists = {
        'failed': failed_list,
        'unreported': unreported_list,
        'changed': changed_list,
        'mismatch': mismatch_list,
        'pending': pending_list,
    }
    if dashboard_show in shortlists:
        merged_nodes_list = shortlists[dashboard_show]
    else:
        # 'recent' and any unknown value fall back to the latest 25 nodes.
        merged_nodes_list = dictstatus(
            node_list, reports_dict, event_dict, sort=False, get_status="all", puppet_run_time=puppet_run_time)

    context['node_list'] = merged_nodes_list
    context['selected_view'] = dashboard_show
    context['population'] = puppet_population
    context['total_resource'] = total_resources['Value']
    context['avg_resource'] = "{:.2f}".format(avg_resource_node['Value'])
    context['failed_nodes'] = node_fail_count
    context['changed_nodes'] = node_change_count
    context['unreported_nodes'] = node_unreported_count
    context['mismatching_timestamps'] = node_off_timestamps_count
    context['pending_nodes'] = node_pending_count

    return HttpResponse(json.dumps(context), content_type="application/json")
Ejemplo n.º 15
0
def dashboard_nodes_json(request):
    """Return the dashboard node list as JSON.

    Fetches all active nodes, their latest reports, event counts and the
    25 most recently reporting nodes from PuppetDB, classifies nodes into
    failed / changed / unreported / mismatching / pending buckets and
    returns the list selected by the ``show`` GET parameter
    (default ``recent`` = the latest 25 nodes).
    """
    context = {}
    if request.method == 'GET':
        if 'source' in request.GET:
            source = request.GET.get('source')
            set_server(request, source)
    if request.method == 'POST':
        # Timezone switcher posts back here; store it and return the user.
        request.session['django_timezone'] = request.POST['timezone']
        return redirect(request.POST['return_url'])

    source_url, source_certs, source_verify = get_server(request)

    puppet_run_time = get_server(request, type='run_time')

    # Dashboard to show nodes of "recent, failed, unreported or changed"
    dashboard_show = request.GET.get('show', 'recent')
    # Latest report only, active (non-deactivated) nodes only.
    events_params = {
        'query':
            {
                1: '["and",["=","latest_report?",true],["in", "certname",["extract", "certname",["select_nodes",["null?","deactivated",true]]]]]'
            },
        'summarize_by': 'certname',
    }
    # NOTE: a previously-defined `all_nodes_params` dict was never attached
    # to any job and has been removed as dead code.
    reports_params = {
        'query':
            {
                1: '["and",["=","latest_report?",true],["in", "certname",["extract", "certname",["select_nodes",["null?","deactivated",true]]]]]'
            }
    }
    nodes_params = {
        'limit': 25,
        'order_by': {
            'order_field': {
                'field': 'report_timestamp',
                'order': 'desc',
            },
            'query_field': {'field': 'certname'},
        },
    }

    jobs = {
        'all_nodes': {
            'url': source_url,
            'certs': source_certs,
            'verify': source_verify,
            'api_version': 'v4',
            'id': 'all_nodes',
            'path': '/nodes',
            'request': request
        },
        'events': {
            'url': source_url,
            'certs': source_certs,
            'verify': source_verify,
            'id': 'event_counts',
            'path': 'event-counts',
            'api_version': 'v4',
            'params': events_params,
            'request': request
        },
        'nodes': {
            'url': source_url,
            'certs': source_certs,
            'verify': source_verify,
            'api_version': 'v4',
            'id': 'nodes',
            'path': '/nodes',
            'params': nodes_params,
            'request': request
        },
        'reports': {
            'url': source_url,
            'certs': source_certs,
            'verify': source_verify,
            'api_version': 'v4',
            'id': 'reports',
            'path': '/reports',
            'params': reports_params,
            'request': request
        },
    }

    puppetdb_results = run_puppetdb_jobs(jobs)
    # Information about all active nodes in puppet
    all_nodes_list = puppetdb_results['all_nodes']
    # All available events for the latest puppet reports
    event_list = puppetdb_results['event_counts']
    event_dict = {item['subject']['title']: item for item in event_list}
    # All of the latest reports
    reports_list = puppetdb_results['reports']
    reports_dict = {item['certname']: item for item in reports_list}
    # 25 Nodes
    node_list = puppetdb_results['nodes']

    failed_list, changed_list, unreported_list, mismatch_list, pending_list = dictstatus(all_nodes_list,
                                                                                         reports_dict,
                                                                                         event_dict,
                                                                                         sort=True,
                                                                                         sortby='latestReport',
                                                                                         get_status='notall',
                                                                                         puppet_run_time=puppet_run_time)
    # De-duplicate the buckets so each node lands in a single list.
    pending_list = [x for x in pending_list if x not in unreported_list]
    changed_list = [x for x in changed_list if
                    x not in unreported_list and x not in failed_list and x not in pending_list]
    failed_list = [x for x in failed_list if x not in unreported_list]
    unreported_list = [x for x in unreported_list if x not in failed_list]

    if dashboard_show == 'recent':
        merged_nodes_list = dictstatus(
            node_list, reports_dict, event_dict, sort=False, get_status="all", puppet_run_time=puppet_run_time)
    elif dashboard_show == 'failed':
        merged_nodes_list = failed_list
    elif dashboard_show == 'unreported':
        merged_nodes_list = unreported_list
    elif dashboard_show == 'changed':
        merged_nodes_list = changed_list
    elif dashboard_show == 'mismatch':
        merged_nodes_list = mismatch_list
    elif dashboard_show == 'pending':
        merged_nodes_list = pending_list
    else:
        # Unknown value: fall back to the latest 25 nodes.
        merged_nodes_list = dictstatus(
            node_list, reports_dict, event_dict, sort=False, get_status="all", puppet_run_time=puppet_run_time)

    context['node_list'] = merged_nodes_list
    context['selected_view'] = dashboard_show

    return HttpResponse(json.dumps(context), content_type="application/json")
Ejemplo n.º 16
0
def get_file(request, certname, environment, rtitle, rtype, md5sum_from=None, md5sum_to=None, diff=False,
             file_status='from'):
    """Fetch a managed file's contents, or a diff between two versions.

    Sources are tried in order: the Puppet filebucket, then (for the
    new/"to" version only) the file's PuppetDB resource, then the
    Puppetmaster fileserver for ``source``-based File resources.

    Args:
        request: Django request; used to resolve the active PuppetDB,
            filebucket and fileserver backends.
        certname: Node the file belongs to.
        environment: Puppet environment used to build bucket/fileserver URLs.
        rtitle: File resource title.
        rtype: File resource type.
        md5sum_from: "{md5}<hash>" of the old file version.
        md5sum_to: "{md5}<hash>" of the new file version.
        diff: When True (and file_status == 'both'), return a unified diff.
        file_status: 'from', 'to' or 'both' - which version(s) to fetch.

    Returns:
        File text (prefixed with a note about where it was found), a list
        of unified-diff lines, or False when the file cannot be retrieved
        or the filebucket/fileserver features are disabled.
    """
    puppetdb_source, puppetdb_certs, puppetdb_verify = get_server(request=request)
    filebucket_source, filebucket_certs, filebucket_verify, filebucket_show = get_server(request=request,
                                                                                         type='filebucket')
    fileserver_source, fileserver_certs, fileserver_verify, fileserver_show = get_server(request=request,
                                                                                         type='fileserver')
    # If Clientbucket is enabled continue else return False

    def fetch_filebucket(url, method):
        """GET or HEAD a filebucket URL; return body text or False on failure."""
        headers = {
            # NOTE(review): 's' appears to ask the Puppet filebucket API for
            # raw file contents - confirm against the file_bucket_file API.
            'Accept': 's',
        }
        methods = {'get': requests.get,
                   'head': requests.head,
                   }
        if method not in methods:
            print('No can has method: %s' % method)
            return False
        resp = methods[method](url,
                               headers=headers,
                               verify=filebucket_verify,
                               cert=filebucket_certs)
        if resp.status_code != 200:
            return False
        else:
            return resp.text

    def fetch_fileserver(url, method):
        """GET a Puppetmaster fileserver URL; return body text or False on failure."""
        methods = {'get': requests.get,
                   }

        if method not in methods:
            print('No can has method: %s' % method)
            return False
        resp = methods[method](url,
                               verify=fileserver_verify,
                               cert=fileserver_certs)
        if resp.status_code != 200:
            return False
        else:
            return resp.text

    def get_resource(certname, rtype, rtitle):
        """Look up a single resource for (certname, rtype, rtitle) in PuppetDB.

        Returns the API result list, or False when nothing was found.
        """
        resource_params = {
            'query':
                {
                    'operator': 'and',
                    1: '["=", "certname", "' + certname + '"]',
                    2: '["=", "type", "' + rtype + '"]',
                    3: '["=", "title", "' + rtitle + '"]'

                },
        }
        data = pdb_api_get(
            api_url=puppetdb_source,
            path='resources',
            verify=puppetdb_verify,
            cert=puppetdb_certs,
            params=mk_puppetdb_query(resource_params, request))
        if not data:
            return False
        else:
            return data

    # Bail out early when filebucket viewing is disabled or no fileserver
    # backend is configured.
    if not filebucket_show or not fileserver_source:
        return False
    if file_status == 'both':
        # Diff mode: both hashes plus the resource identifiers are required.
        if md5sum_to and md5sum_from and certname and rtitle and rtype:
            if diff:
                # is the hash from puppetdb resource same as md5sum_to
                hash_matches = False

                md5sum_from = md5sum_from.replace('{md5}', '')
                md5sum_to = md5sum_to.replace('{md5}', '')

                from_url = filebucket_source + environment + '/file_bucket_file/md5/' + md5sum_from
                to_url = filebucket_source + environment + '/file_bucket_file/md5/' + md5sum_to

                # HEAD first to check existence, then GET the contents.
                if fetch_filebucket(from_url, 'head') is not False:
                    resource_from = fetch_filebucket(from_url, 'get')
                else:
                    # Could not find old MD5 in Filebucket
                    return False
                if fetch_filebucket(to_url, 'head') is not False:
                    resource_to = fetch_filebucket(to_url, 'get')
                # Try puppetdb resources if not found in filebucket.
                else:
                    resource_to = get_resource(certname=certname, rtype=rtype, rtitle=rtitle)
                    if resource_to is False:
                        # Could not find new file in Filebucket or as a PuppetDB Resource
                        return False
                    else:
                        resource_to = resource_to[0]
                    if 'content' in resource_to['parameters']:
                        resource_to = resource_to['parameters']['content']
                        hash_of_resource = get_hash(resource_to)
                        if hash_of_resource == md5sum_to:
                            # file from resource matches filebucket md5 hash
                            hash_matches = True
                    # Solve the viewing of source files by retrieving it from Puppetmaster
                    elif 'source' in resource_to['parameters'] and fileserver_show is True:
                        source_path = resource_to['parameters']['source']
                        if source_path.startswith('puppet://'):
                            # extract the path for the file
                            source_path = source_path.split('/')  # ['puppet:', '', '', 'files', 'autofs', 'auto.home']
                            source_path = '/'.join(source_path[3:])  # Skip first 3 entries since they are not needed
                            # https://puppetmaster.example.com:8140/production/file_content/files/autofs/auto.home
                            url = fileserver_source + environment + '/file_content/' + source_path
                            resource_to = fetch_fileserver(url, 'get')
                    else:
                        return False
                # now that we have come this far, we have both files.
                # Lets differentiate the shit out of these files.
                from_split_lines = resource_from.split('\n')
                to_split_lines = resource_to.split('\n')
                diff = difflib.unified_diff(from_split_lines, to_split_lines)
                diff = ('\n'.join(list(diff))).split('\n')
                return diff
            else:
                return False
        else:
            return False

    # Single-file mode: pick the hash for the requested version.
    if file_status == 'from' and md5sum_from:
        md5sum = md5sum_from.replace('{md5}', '')
    elif file_status == 'to' and md5sum_to:
        md5sum = md5sum_to.replace('{md5}', '')
    else:
        return False
    # Creates headers and url from the data we got

    url_clientbucket = filebucket_source + environment + '/file_bucket_file/md5/' + md5sum
    if fetch_filebucket(url_clientbucket, 'head') is False:
        # Check if theres a resource available for the latest file available
        if file_status == 'to':
            resp_pdb = get_resource(certname=certname, rtype=rtype, rtitle=rtitle)
            # we got the data lets give the user the good news.
            if resp_pdb:
                resource_data = resp_pdb[0]
                if 'content' in resource_data['parameters']:
                    prepend_text = 'This file with MD5 %s was found in PuppetDB Resources.\n\n' % (
                        get_hash(resource_data['parameters']['content']))
                    return prepend_text + resource_data['parameters']['content']
                elif 'source' in resource_data['parameters'] and fileserver_show is True:
                    source_path = resource_data['parameters']['source']
                    if source_path.startswith('puppet://'):
                        # extract the path for the file
                        source_path = source_path.split('/')  # ['puppet:', '', '', 'files', 'autofs', 'auto.home']
                        source_path = '/'.join(source_path[3:])  # Skip first 3 entries since they are not needed
                        # https://puppetmaster.example.com:8140/production/file_content/files/autofs/auto.home
                        url = fileserver_source + environment + '/file_content/' + source_path
                        source_content = fetch_fileserver(url, 'get')
                        prepend_text = 'This file with MD5 %s was retrieved from the PuppetMaster Fileserver.\n\n' % (
                            get_hash(source_content))
                        return prepend_text + source_content
                    else:
                        return False
                else:
                    return False
            # the file can't be found as a resource and or fileserver support not enabled
            else:
                return False
        # We probably don't want to search for resources if its the old file.
        else:
            return False
    else:
        filebucket_results = fetch_filebucket(url_clientbucket, 'get')
        prepend_text = 'This file with MD5 %s was found in Filebucket.\n\n' % (md5sum)
        return prepend_text + filebucket_results
Ejemplo n.º 17
0
def _report_row(report, event=None):
    """Flatten one PuppetDB report (plus an optional event-count summary)
    into the dict consumed by the reports table. ``event`` is None when
    no event-count entry matched the report; counters default to 0."""
    start = json_to_datetime(report['start_time'])
    end = json_to_datetime(report['end_time'])
    return {
        'hash': report['hash'],
        'certname': report['certname'],
        'environment': report['environment'],
        'start_time': filters.date(localtime(start), 'Y-m-d H:i:s'),
        'end_time': filters.date(localtime(end), 'Y-m-d H:i:s'),
        'events_successes': event['successes'] if event else 0,
        'events_noops': event['noops'] if event else 0,
        'events_failures': event['failures'] if event else 0,
        'events_skipped': event['skips'] if event else 0,
        'report_status': report['status'],
        'config_version': report['configuration_version'],
        'run_duration': "{0:.0f}".format((end - start).total_seconds()),
    }


def reports_json(request, certname=None):
    """Return a paginated (25 per page) JSON list of reports for ``certname``.

    Each report is merged with its event-count summary (successes, noops,
    failures, skips); reports without a matching event-count entry get
    zeroed counters.
    """
    source_url, source_certs, source_verify = get_server(request)
    context = {}
    # Current page number is stored in the session so it survives reloads.
    # BUG FIX: use .get() - a fresh session with ?page=N used to raise
    # KeyError because 'report_page' was read before it was ever set.
    if request.GET.get('page', False):
        if request.session.get('report_page') != int(request.GET.get('page', 1)):
            request.session['report_page'] = int(request.GET.get('page', 1))
        if request.session['report_page'] <= 0:
            request.session['report_page'] = 1
    else:
        if 'report_page' not in request.session:
            request.session['report_page'] = 1
    if request.session['report_page'] <= 0:
        offset = 0
    else:
        offset = (25 * request.session['report_page']) - 25
    reports_params = {
        'query': {
            1: '["=","certname","' + certname + '"]'
        },
        'order_by': {
            'order_field': {
                'field': 'start_time',
                'order': 'desc',
            },
        },
        'limit': 25,
        'include_total': 'true',
        'offset': offset,
    }
    reports_list, headers = puppetdb.api_get(
        api_url=source_url,
        cert=source_certs,
        verify=source_verify,
        path='/reports',
        api_version='v4',
        params=puppetdb.mk_puppetdb_query(reports_params, request),
    )
    # Work out the number of pages from the X-Records response header.
    xrecords = headers['X-Records']
    num_pages_wdec = float(xrecords) / 25
    num_pages_wodec = float("{:.0f}".format(num_pages_wdec))
    if num_pages_wdec > num_pages_wodec:
        num_pages = num_pages_wodec + 1
    else:
        num_pages = num_pages_wodec

    report_status = []
    for report in reports_list:
        found_report = False
        events_params = {
            'query': {
                1: '["=","report","' + report['hash'] + '"]'
            },
            'summarize_by': 'certname',
        }
        # BUG FIX: this call was missing cert=/verify=, unlike every other
        # api_get call - it failed against TLS-secured PuppetDB sources.
        eventcount_list = puppetdb.api_get(
            path='event-counts',
            api_url=source_url,
            cert=source_certs,
            verify=source_verify,
            api_version='v4',
            params=puppetdb.mk_puppetdb_query(events_params, request),
        )
        # Attach the matching event-count summary, if any.
        for event in eventcount_list:
            if event['subject']['title'] == report['certname']:
                found_report = True
                report_status.append(_report_row(report, event))
                break
        if found_report is False:
            report_status.append(_report_row(report))

    context['certname'] = certname
    context['reports_list'] = report_status
    context['curr_page'] = request.session['report_page']
    context['tot_pages'] = "{:.0f}".format(num_pages)
    return HttpResponse(json.dumps(context), content_type="application/json")
Ejemplo n.º 18
0
def facts_json(request):
    """Return facts for a single node as JSON (indented output).

    GET params:
        certname -- required; the node whose facts are returned.
        facts    -- optional comma-separated list of fact names used to
                    limit the result; names may only contain [A-Za-z0-9_].
        source   -- optional PuppetDB source to switch the session to.
    POST stores a new session timezone and redirects to ``return_url``.
    """
    context = {}
    if request.method == 'GET':
        if 'source' in request.GET:
            source = request.GET.get('source')
            set_server(request, source)
    if request.method == 'POST':
        request.session['django_timezone'] = request.POST['timezone']
        return redirect(request.POST['return_url'])

    source_url, source_certs, source_verify = get_server(request)

    certname = None
    facts = None
    if 'certname' in request.GET:
        certname = request.GET.get('certname')
    if 'facts' in request.GET:
        facts = request.GET.get('facts').split(',')

    if not certname:
        context['error'] = 'Certname not specified.'
        return HttpResponse(json.dumps(context))
    if facts:
        # Validate each requested fact name before it is spliced into the
        # PuppetDB query string.
        # Fix: the original class [^aA-zZ0-9_] relied on the A-z range,
        # which also admits the punctuation [ \ ] ^ _ ` between 'Z' and 'a'.
        illegal = re.compile(r'[^A-Za-z0-9_]')
        fact_query = list()
        for fact in facts:
            fact = fact.strip()
            if illegal.findall(fact):
                context['error'] = 'Illegal characters found in facts list. '
                context['error'] += 'Facts must not match anything within this regex <[^A-Za-z0-9_]>.'
                return HttpResponse(json.dumps(context))
            fact_query.append('["=","name","' + fact + '"]')
        fact_query = ','.join(fact_query)
        facts_params = {
            'query':
                {
                    1: '["and",["=","certname","' + certname + '"],["or",' + fact_query + ']]'
                },
            # Fix: key was 'order-by'; every other query dict in this module
            # uses 'order_by'.
            'order_by':
                {
                    'order_field':
                        {
                            'field': 'name',
                            'order': 'asc',
                        }
                }
        }
    else:
        facts_params = {
            'query':
                {
                    1: '["=","certname","' + certname + '"]'
                },
            'order_by':
                {
                    'order_field':
                        {
                            'field': 'name',
                            'order': 'asc',
                        }
                }
        }
    facts_list = puppetdb.api_get(
        api_url=source_url,
        cert=source_certs,
        verify=source_verify,
        path='facts',
        params=puppetdb.mk_puppetdb_query(
            facts_params, request),
    )
    context['certname'] = certname
    context['facts_list'] = facts_list

    return HttpResponse(json.dumps(context, indent=2), content_type="application/json")
Ejemplo n.º 19
0
def analytics(request):
    """Render the analytics dashboard.

    Fans out four PuppetDB queries concurrently (event counts by class,
    by resource, aggregate status counts, and the last 100 reports) and
    summarizes them into the template context.
    """
    context = {
        'timezones': pytz.common_timezones,
        'SOURCES': AVAILABLE_SOURCES
    }
    if request.method == 'GET':
        if 'source' in request.GET:
            source = request.GET.get('source')
            set_server(request, source)
    if request.method == 'POST':
        request.session['django_timezone'] = request.POST['timezone']
        return redirect(request.POST['return_url'])

    source_url, source_certs, source_verify = get_server(request)
    events_class_params = {
        'query': {
            1:
            '["and",["=","latest_report?",true],["in","certname",["extract","certname",["select_nodes",["null?","deactivated",true]]]]]'
        },
        'summarize_by': 'containing_class',
    }
    events_resource_params = {
        'query': {
            1:
            '["and",["=","latest_report?",true],["in","certname",["extract","certname",["select_nodes",["null?","deactivated",true]]]]]'
        },
        'summarize_by': 'resource',
    }
    events_status_params = {
        'query': {
            1:
            '["and",["=","latest_report?",true],["in","certname",["extract","certname",["select_nodes",["null?","deactivated",true]]]]]'
        },
        'summarize_by': 'resource',
    }
    reports_runavg_params = {
        'limit': 100,
        'order_by': {
            'order_field': {
                'field': 'receive_time',
                'order': 'desc',
            },
            'query_field': {
                'field': 'certname'
            },
        },
    }
    jobs = {
        'events_class_list': {
            'url': source_url,
            'certs': source_certs,
            'verify': source_verify,
            'id': 'events_class_list',
            'path': '/event-counts',
            'api_version': 'v4',
            'params': events_class_params,
            'request': request
        },
        'events_resource_list': {
            'url': source_url,
            'certs': source_certs,
            'verify': source_verify,
            'id': 'events_resource_list',
            'path': '/event-counts',
            'api_version': 'v4',
            'params': events_resource_params,
            'request': request
        },
        'events_status_list': {
            'url': source_url,
            'certs': source_certs,
            'verify': source_verify,
            'id': 'events_status_list',
            'path': '/aggregate-event-counts',
            'api_version': 'v4',
            'params': events_status_params,
            'request': request
        },
        'reports_run_avg': {
            'url': source_url,
            'certs': source_certs,
            'verify': source_verify,
            'id': 'reports_run_avg',
            'path': '/reports',
            'api_version': 'v4',
            'params': reports_runavg_params,
            'request': request
        },
    }

    # Run all four queries concurrently (4 workers).
    job_results = run_puppetdb_jobs(jobs, 4)

    reports_run_avg = job_results['reports_run_avg']
    events_class_list = job_results['events_class_list']
    events_resource_list = job_results['events_resource_list']
    events_status_list = job_results['events_status_list']

    # Average agent run duration over the last (up to) 100 reports.
    num_runs_avg = len(reports_run_avg)
    run_avg_times = []
    avg_run_time = 0
    for report in reports_run_avg:
        run_time = "{0:.0f}".format(
            (json_to_datetime(report['end_time']) -
             json_to_datetime(report['start_time'])).total_seconds())
        avg_run_time += int(run_time)
        run_avg_times.append(run_time)
    if num_runs_avg != 0:
        avg_run_time = "{0:.0f}".format(avg_run_time / num_runs_avg)
    else:
        avg_run_time = 0

    class_event_results = []
    class_resource_results = []
    class_status_results = []

    for item in events_class_list:
        class_name = item['subject']['title']
        class_total = item['skips'] + item['failures'] + item['noops'] + item[
            'successes']
        class_event_results.append((class_name, class_total))

    for item in events_resource_list:
        class_name = item['subject']['type']
        class_total = item['skips'] + item['failures'] + item['noops'] + item[
            'successes']
        class_resource_results.append((class_name, class_total))

    # Fix: removed leftover debug print() calls and replaced the
    # `value is 0` identity test (implementation-defined for ints)
    # with a proper equality comparison.
    if events_status_list:
        for status, value in events_status_list[0].items():
            if value == 0 or status == 'total' or status == 'summarize_by':
                continue
            class_status_results.append((status, value))

    context['class_events'] = class_event_results
    context['class_status'] = class_status_results
    context['resource_events'] = class_resource_results
    context['run_times'] = run_avg_times
    context['run_num'] = num_runs_avg
    context['run_avg'] = avg_run_time

    return render(request, 'pano/analytics/analytics.html', context)
Ejemplo n.º 20
0
def nodes_json(request):
    """Return the paginated node list as JSON, or stream it as CSV.

    Pagination, sorting and search state is persisted in the session.
    GET params: source, dl_csv, limits, page, sortfield, sortfieldby,
    search (a raw PuppetDB query string, or 'clear_rules'), and
    include_facts (comma-separated fact names; CSV download only).
    """
    if request.method == 'GET':
        if 'source' in request.GET:
            source = request.GET.get('source')
            set_server(request, source)
    if request.method == 'POST':
        request.session['django_timezone'] = request.POST['timezone']
        return redirect(request.POST['return_url'])

    source_url, source_certs, source_verify = get_server(request)
    # Fields the user may sort on; timestamps come from /nodes, the
    # event counters from /event-counts.
    valid_sort_fields = (
        'certname',
        'catalog_timestamp',
        'report_timestamp',
        'facts_timestamp',
        'successes',
        'noops',
        'failures',
        'skips')
    try:
        # If user requested to download csv formatted file. Default value is False
        dl_csv = request.GET.get('dl_csv', False)
        if dl_csv == 'true':
            dl_csv = True
        else:
            dl_csv = False
        # Add limits to session
        if request.GET.get('limits', False):
            if request.session['limits'] != int(request.GET.get('limits', 50)):
                request.session['limits'] = int(request.GET.get('limits', 50))
            if request.session['limits'] <= 0:
                request.session['limits'] = 50
        else:
            if 'limits' not in request.session:
                request.session['limits'] = 50

        # Cur Page Number
        if request.GET.get('page', False):
            if request.session['page'] != int(request.GET.get('page', 1)):
                request.session['page'] = int(request.GET.get('page', 1))
            if request.session['page'] <= 0:
                request.session['page'] = 1
        else:
            if 'page' not in request.session:
                request.session['page'] = 1

        # Cur sort field
        if request.GET.get('sortfield', False):
            if request.session['sortfield'] != request.GET.get('sortfield'):
                request.session['sortfield'] = request.GET.get('sortfield')
            if request.session['sortfield'] not in valid_sort_fields:
                request.session['sortfield'] = 'report_timestamp'
        else:
            if 'sortfield' not in request.session:
                request.session['sortfield'] = 'report_timestamp'

        # Cur sort order
        if request.GET.get('sortfieldby', False):
            avail_sortorder = ['asc', 'desc']
            if request.session['sortfieldby'] != request.GET.get('sortfieldby'):
                request.session['sortfieldby'] = request.GET.get('sortfieldby')
            if request.session['sortfieldby'] not in avail_sortorder:
                request.session['sortfieldby'] = 'desc'
        else:
            if 'sortfieldby' not in request.session:
                request.session['sortfieldby'] = 'desc'
        # Search parameters takes a valid puppetdb query string
        if request.GET.get('search', False):
            if 'search' in request.session and (request.session['search'] == request.GET.get('search')):
                pass
            else:
                if request.GET.get('search') == 'clear_rules':
                    # Reset all filters back to their defaults.
                    request.session['sortfield'] = 'report_timestamp'
                    request.session['sortfieldby'] = 'desc'
                    request.session['page'] = 1
                    request.session['search'] = None
                else:
                    request.session['page'] = 1
                    request.session['search'] = request.GET.get('search')
        else:
            if 'search' not in request.session:
                request.session['sortfield'] = 'report_timestamp'
                request.session['sortfieldby'] = 'desc'
                request.session['page'] = 1
                request.session['search'] = None

        # Set offset
        request.session['offset'] = (request.session['limits'] * request.session['page']) - request.session[
            'limits']
    except Exception:
        # Fix: was a bare `except:`, which would also swallow SystemExit
        # and KeyboardInterrupt. Bad int conversions / missing session
        # keys end up here.
        return HttpResponseBadRequest('Oh no! Your filters were invalid.')

    # Valid sort field that the user can search agnaist.
    sort_field = request.session['sortfield']
    sort_field_order = request.session['sortfieldby']
    page_num = request.session['page']

    if request.session['search'] is not None:
        node_params = {
            'query':
                {
                    1: request.session['search']
                },
        }
    else:
        node_params = {
            'query': {},
        }

    nodes_sort_fields = ['certname', 'catalog_timestamp', 'report_timestamp', 'facts_timestamp']
    if sort_field in nodes_sort_fields:
        node_params['order_by'] = {
            'order_field':
                {
                    'field': sort_field,
                    'order': sort_field_order,
                },
        }
        if dl_csv is False:
            # Don't limit results if its CSV
            node_params['limit'] = request.session['limits']
            node_params['offset'] = request.session['offset']
        node_params['include_total'] = 'true'
    else:
        node_params['order_by'] = {
            'order_field':
                {
                    'field': 'report_timestamp',
                    'order': 'desc',
                },
        }
    # Fix: a second, identical list (`node_sort_fields`) was defined here;
    # reuse the one declared above.
    if sort_field in nodes_sort_fields:
        try:
            # include_total is set, so api_get returns (body, headers).
            node_list, node_headers = puppetdb.api_get(
                api_url=source_url,
                cert=source_certs,
                verify=source_verify,
                path='/nodes',
                api_version='v4',
                params=puppetdb.mk_puppetdb_query(
                    node_params, request),
            )
        except Exception:
            # Fix: was a bare `except:`; fall back to an empty result set.
            node_list = []
            node_headers = dict()
            node_headers['X-Records'] = 0
    else:
        node_list = puppetdb.api_get(
            api_url=source_url,
            cert=source_certs,
            verify=source_verify,
            path='/nodes',
            api_version='v4',
            params=puppetdb.mk_puppetdb_query(
                node_params, request),
        )

    # Work out the number of pages from the xrecords response
    # return fields that you can sort by
    # for each node in the node_list, find out if the latest run has any failures
    # v3/event-counts --data-urlencode query='["=","latest-report?",true]'
    # --data-urlencode summarize-by='certname'
    report_params = {
        'query':
            {
                1: '["and",["=","latest_report?",true],["in", "certname",["extract", "certname",["select_nodes",["null?","deactivated",true]]]]]'
            },
        'summarize_by': 'certname',
    }
    status_sort_fields = ['successes', 'failures', 'skips', 'noops']

    report_status_params = {
        'query':
            {
                1: '["and",["=","latest_report?",true],["in", "certname",["extract", "certname",["select_nodes",["null?","deactivated",true]]]]]'
            }
    }
    report_status_list = puppetdb.api_get(
        api_url=source_url,
        cert=source_certs,
        verify=source_verify,
        path='/reports',
        params=puppetdb.mk_puppetdb_query(report_status_params, request),
        api_version='v4',
    )
    if sort_field in status_sort_fields:
        if request.session['search'] is not None:
            report_params['query'] = {'operator': 'and',
                                      1: request.session['search'],
                                      2: '["=","latest_report?",true]',
                                      3: '["in", "certname",["extract", "certname",["select_nodes",["null?","deactivated",true]]]]',
                                      }
        report_params['order_by'] = {
            'order_field':
                {
                    'field': sort_field,
                    'order': sort_field_order,
                }
        }
        report_params['include_total'] = 'true'
        # Don't limit results if its CSV
        if dl_csv is False:
            report_params['limit'] = request.session['limits']
            report_params['offset'] = request.session['offset']

        report_list, report_headers = puppetdb.api_get(
            api_url=source_url,
            cert=source_certs,
            verify=source_verify,
            path='/event-counts',
            params=puppetdb.mk_puppetdb_query(report_params, request),
            api_version='v4',
        )
    else:
        report_list = puppetdb.api_get(
            api_url=source_url,
            cert=source_certs,
            verify=source_verify,
            path='event-counts',
            params=puppetdb.mk_puppetdb_query(report_params, request),
            api_version='v4',
        )
    # number of results depending on sort field.
    if sort_field in status_sort_fields:
        xrecords = report_headers['X-Records']
        total_results = xrecords
    elif sort_field in nodes_sort_fields:
        xrecords = node_headers['X-Records']
        total_results = xrecords

    # Round the page count up when there is a partial last page.
    num_pages_wdec = float(xrecords) / request.session['limits']
    num_pages_wodec = float("{:.0f}".format(num_pages_wdec))
    if num_pages_wdec > num_pages_wodec:
        num_pages = num_pages_wodec + 1
    else:
        num_pages = num_pages_wodec

    # Converts lists of dicts to dicts.
    status_dict = {item['certname']: item for item in report_status_list}
    report_dict = {item['subject']['title']: item for item in report_list}
    if sort_field_order == 'desc':
        rows = dictstatus(
            node_list, status_dict, report_dict, sortby=sort_field, asc=True, sort=False)
        sort_field_order_opposite = 'asc'
    elif sort_field_order == 'asc':
        rows = dictstatus(
            node_list, status_dict, report_dict, sortby=sort_field, asc=False, sort=False)
        sort_field_order_opposite = 'desc'

    if dl_csv is True:
        # Fix: was `if rows is []`, which is always False (identity vs. a
        # fresh list literal), so the empty-result guard never fired.
        if not rows:
            pass
        else:
            # Generate a sequence of rows. The range is based on the maximum number of
            # rows that can be handled by a single sheet in most spreadsheet
            # applications.
            include_facts = request.GET.get('include_facts', False)
            csv_headers = ['Certname',
                           'Latest Catalog',
                           'Latest Report',
                           'Latest Facts',
                           'Success',
                           'Noop',
                           'Failure',
                           'Skipped',
                           'Run Status']
            if include_facts is not False:
                merged_list_facts = []
                facts = {}
                for fact in include_facts.split(','):
                    # Sanitize the fact input from the user
                    fact = fact.strip()
                    # Add the fact name to the headers list
                    csv_headers.append(fact)

                    # build the params for each fact.
                    facts_params = {
                        'query':
                            {
                                1: '["=","name","' + fact + '"]'
                            },
                    }
                    # NOTE(review): every other call passes `request` to
                    # mk_puppetdb_query; confirm the omission here is deliberate.
                    fact_list = puppetdb.api_get(
                        api_url=source_url,
                        cert=source_certs,
                        verify=source_verify,
                        path='facts',
                        params=puppetdb.mk_puppetdb_query(facts_params),
                        api_version='v4',
                    )
                    # Populate the facts dict with the facts we have retrieved
                    # Convert the fact list into a fact dict!
                    facts[fact] = {item['certname']: item for item in fact_list}

                i = 1
                jobs = {}
                # Add ID to each job so that it can be assembled in
                # the same order after we recieve the job results
                # We do this via jobs so that we can get faster results.
                for node in rows:
                    jobs[i] = {
                        'id': i,
                        'include_facts': include_facts.split(','),
                        'node': node,
                        'facts': facts,
                    }
                    i += 1

                csv_results = generate_csv(jobs)
                rows = []
                i = 1
                # with the job results we can now recreate merged_list
                # in the order we sent them.
                while i <= len(csv_results):
                    rows.append(csv_results[i])
                    i += 1
            # Insert the csv header to the top of the list.
            rows.insert(0, csv_headers)
            pseudo_buffer = Echo()
            writer = csv.writer(pseudo_buffer)
            response = StreamingHttpResponse((writer.writerow(row) for row in rows),
                                             content_type="text/csv")
            response['Content-Disposition'] = 'attachment; filename="puppetdata-%s.csv"' % (datetime.datetime.now())
            return response

    """
    c_r_s* = current request sort
    c_r_* = current req
    r_s* = requests available
    """
    context = {
        'nodeList': rows,
        'total_nodes': total_results,
        'c_r_page': page_num,
        'c_r_limit': request.session['limits'],
        'r_sfield': valid_sort_fields,
        'c_r_sfield': sort_field,
        'r_sfieldby': ['asc', 'desc'],
        'c_r_sfieldby': sort_field_order,
        'c_r_sfieldby_o': sort_field_order_opposite,
        'tot_pages': '{0:g}'.format(num_pages),
    }
    return HttpResponse(json.dumps(context), content_type="application/json")
Ejemplo n.º 21
0
def facts_json(request):
    """Return facts for a single node as JSON (compact output).

    GET params:
        certname -- required; the node whose facts are returned.
        facts    -- optional comma-separated list of fact names used to
                    limit the result; names may only contain [A-Za-z0-9_].
        source   -- optional PuppetDB source to switch the session to.
    POST stores a new session timezone and redirects to ``return_url``.
    """
    context = {}
    if request.method == 'GET':
        if 'source' in request.GET:
            source = request.GET.get('source')
            set_server(request, source)
    if request.method == 'POST':
        request.session['django_timezone'] = request.POST['timezone']
        return redirect(request.POST['return_url'])

    source_url, source_certs, source_verify = get_server(request)

    certname = None
    facts = None
    if 'certname' in request.GET:
        certname = request.GET.get('certname')
    if 'facts' in request.GET:
        facts = request.GET.get('facts').split(',')

    if not certname:
        context['error'] = 'Certname not specified.'
        return HttpResponse(json.dumps(context))
    if facts:
        # Validate each requested fact name before it is spliced into the
        # PuppetDB query string.
        # Fix: the original class [^aA-zZ0-9_] relied on the A-z range,
        # which also admits the punctuation [ \ ] ^ _ ` between 'Z' and 'a'.
        illegal = re.compile(r'[^A-Za-z0-9_]')
        fact_query = list()
        for fact in facts:
            fact = fact.strip()
            if illegal.findall(fact):
                context['error'] = 'Illegal characters found in facts list. '
                context['error'] += 'Facts must not match anything within this regex <[^A-Za-z0-9_]>.'
                return HttpResponse(json.dumps(context))
            fact_query.append('["=","name","' + fact + '"]')
        fact_query = ','.join(fact_query)
        facts_params = {
            'query':
                {
                    1: '["and",["=","certname","' + certname + '"],["or",' + fact_query + ']]'
                },
            # Fix: keys were 'order-by'/'order-field'; the rest of this
            # module uses 'order_by'/'order_field'.
            'order_by':
                {
                    'order_field':
                        {
                            'field': 'name',
                            'order': 'asc',
                        }
                }
        }
    else:
        facts_params = {
            'query':
                {
                    1: '["=","certname","' + certname + '"]'
                },
            'order_by':
                {
                    'order_field':
                        {
                            'field': 'name',
                            'order': 'asc',
                        }
                }
        }
    facts_list = puppetdb.api_get(
        api_url=source_url,
        cert=source_certs,
        verify=source_verify,
        path='facts',
        params=puppetdb.mk_puppetdb_query(
            facts_params, request),
    )
    context['certname'] = certname
    context['facts_list'] = facts_list

    return HttpResponse(json.dumps(context), content_type="application/json")
Ejemplo n.º 22
0
def analytics(request):
    """Render the analytics dashboard.

    Fans out four PuppetDB queries concurrently (event counts by class,
    by resource, aggregate status counts, and the last 100 reports) and
    summarizes them into the template context.
    """
    context = {'timezones': pytz.common_timezones,
               'SOURCES': AVAILABLE_SOURCES}
    if request.method == 'GET':
        if 'source' in request.GET:
            source = request.GET.get('source')
            set_server(request, source)
    if request.method == 'POST':
        request.session['django_timezone'] = request.POST['timezone']
        return redirect(request.POST['return_url'])

    source_url, source_certs, source_verify = get_server(request)
    events_class_params = {
        'query':
            {
                1: '["and",["=","latest_report?",true],["in","certname",["extract","certname",["select_nodes",["null?","deactivated",true]]]]]'
            },
        'summarize_by': 'containing_class',
    }
    events_resource_params = {
        'query':
            {
                1: '["and",["=","latest_report?",true],["in","certname",["extract","certname",["select_nodes",["null?","deactivated",true]]]]]'
            },
        'summarize_by': 'resource',
    }
    events_status_params = {
        'query':
            {
                1: '["and",["=","latest_report?",true],["in","certname",["extract","certname",["select_nodes",["null?","deactivated",true]]]]]'
            },
        'summarize_by': 'resource',
    }
    reports_runavg_params = {
        'limit': 100,
        'order_by': {
            'order_field': {
                'field': 'receive_time',
                'order': 'desc',
            },
            'query_field': {'field': 'certname'},
        },
    }
    jobs = {
        'events_class_list': {
            'url': source_url,
            'certs': source_certs,
            'verify': source_verify,
            'id': 'events_class_list',
            'path': '/event-counts',
            'api_version': 'v4',
            'params': events_class_params,
            'request': request
        },
        'events_resource_list': {
            'url': source_url,
            'certs': source_certs,
            'verify': source_verify,
            'id': 'events_resource_list',
            'path': '/event-counts',
            'api_version': 'v4',
            'params': events_resource_params,
            'request': request
        },
        'events_status_list': {
            'url': source_url,
            'certs': source_certs,
            'verify': source_verify,
            'id': 'events_status_list',
            'path': '/aggregate-event-counts',
            'api_version': 'v4',
            'params': events_status_params,
            'request': request
        },
        'reports_run_avg': {
            'url': source_url,
            'certs': source_certs,
            'verify': source_verify,
            'id': 'reports_run_avg',
            'path': '/reports',
            'api_version': 'v4',
            'params': reports_runavg_params,
            'request': request
        },
    }

    # Run all four queries concurrently (4 workers).
    job_results = run_puppetdb_jobs(jobs, 4)

    reports_run_avg = job_results['reports_run_avg']
    events_class_list = job_results['events_class_list']
    events_resource_list = job_results['events_resource_list']
    events_status_list = job_results['events_status_list']

    # Average agent run duration over the last (up to) 100 reports.
    num_runs_avg = len(reports_run_avg)
    run_avg_times = []
    avg_run_time = 0
    for report in reports_run_avg:
        run_time = "{0:.0f}".format(
            (json_to_datetime(report['end_time']) - json_to_datetime(report['start_time'])).total_seconds())
        avg_run_time += int(run_time)
        run_avg_times.append(run_time)
    if num_runs_avg != 0:
        avg_run_time = "{0:.0f}".format(avg_run_time / num_runs_avg)
    else:
        avg_run_time = 0

    class_event_results = []
    class_resource_results = []
    class_status_results = []

    for item in events_class_list:
        class_name = item['subject']['title']
        class_total = item['skips'] + item['failures'] + item['noops'] + item['successes']
        class_event_results.append((class_name, class_total))

    for item in events_resource_list:
        class_name = item['subject']['type']
        class_total = item['skips'] + item['failures'] + item['noops'] + item['successes']
        class_resource_results.append((class_name, class_total))

    # Fix: `value is 0` was an identity comparison against an int literal
    # (implementation-defined); use equality instead.
    if events_status_list:
        for status, value in events_status_list[0].items():
            if value == 0 or status == 'total' or status == 'summarize_by':
                continue
            class_status_results.append((status, value))

    context['class_events'] = class_event_results
    context['class_status'] = class_status_results
    context['resource_events'] = class_resource_results
    context['run_times'] = run_avg_times
    context['run_num'] = num_runs_avg
    context['run_avg'] = avg_run_time

    return render(request, 'pano/analytics/analytics.html', context)
Ejemplo n.º 23
0
def catalogue_json(request, certname=None):
    """Return a node's catalogue (edges, resources, or all) as JSON.

    GET params:
        show -- 'edges' (default), 'resources', or anything else for the
                full catalogue document.
        save -- 'true' to persist the catalogue in SavedCatalogs linked to
                the node's latest report hash.
    """
    context = dict()
    data = dict()
    if not certname:
        context['error'] = 'Must specify certname.'
        return HttpResponse(json.dumps(context), content_type="application/json")
    source_url, source_certs, source_verify = get_server(request)

    # Redirects to the events page if GET param latest is true..
    show = request.GET.get('show', 'edges')
    save_catalog = request.GET.get('save', 'false')

    catalogue_params = {}

    path = '/catalogs/%s' % certname

    catalogue = puppetdb.api_get(
        path=path,
        api_url=source_url,
        verify=source_verify,
        cert=source_certs,
        api_version='v4',
        params=puppetdb.mk_puppetdb_query(catalogue_params, request),
    )
    if 'error' not in catalogue and save_catalog == 'true':
        report_param = {
            'query':
                {
                    'operator': 'and',
                    1: '["=","latest_report?",true]',
                    2: '["=","certname","%s"]' % certname
                }
        }
        report_url = '/reports'
        latest_report = puppetdb.api_get(
            path=report_url,
            api_url=source_url,
            verify=source_verify,
            cert=source_certs,
            api_version='v4',
            params=puppetdb.mk_puppetdb_query(report_param, request),
        )

        # Fix: guard against a node with no reports; indexing [0] on an
        # empty result raised IndexError (HTTP 500).
        if not latest_report:
            data['error'] = 'No report found for node, cannot save catalogue.'
            data['certname'] = certname
            return HttpResponseBadRequest(json.dumps(data, indent=2), content_type='application/json')

        report_hash = latest_report[0]['hash']
        catalogue_hash = catalogue['hash']
        catalogue_timestamp = catalogue['producer_timestamp']

        try:
            saved_catalogue = SavedCatalogs.objects.get(hostname=certname, catalogue_id=catalogue_hash)

            if saved_catalogue.linked_report != report_hash:
                # Grab the linked report from the result set.
                old_linked_report = saved_catalogue.linked_report

                # Update the data.
                saved_catalogue.linked_report = report_hash
                saved_catalogue.timestamp = catalogue_timestamp

                # Save the new data.
                saved_catalogue.save()

                data['success'] = 'Catalogue hash updated.'
                data['certname'] = certname
                data['old_linked_report'] = old_linked_report
                data['new_linked_report'] = report_hash
                return HttpResponse(json.dumps(data, indent=2), content_type='application/json')
            else:
                data['error'] = 'Catalogue hash already exists.'
                data['certname'] = certname
                data['catalogue_hash'] = catalogue_hash
                data['linked_report'] = saved_catalogue.linked_report
                return HttpResponseBadRequest(json.dumps(data, indent=2), content_type='application/json')

        except SavedCatalogs.DoesNotExist:
            # since we couldnt find it in the db its safe to asusme that we can create it!
            SavedCatalogs.objects.create(hostname=certname,
                                         catalogue_id=catalogue_hash,
                                         linked_report=report_hash,
                                         timestamp=catalogue_timestamp,
                                         catalogue=json.dumps(catalogue))
            data['success'] = 'Saved catalogue.'
            data['certname'] = certname
            data['catalogue_hash'] = catalogue_hash
            data['linked_report'] = report_hash
            return HttpResponse(json.dumps(data, indent=2), content_type='application/json')

    # Fix: if PuppetDB returned an error document, the lookups below
    # ('edges'/'resources') raised KeyError (HTTP 500); pass the error
    # through to the client instead.
    if 'error' in catalogue:
        data['error'] = catalogue['error']
        data['certname'] = certname
        return HttpResponseBadRequest(json.dumps(data, indent=2), content_type='application/json')

    if show == 'edges':
        data['data'] = catalogue['edges']['data']
    elif show == 'resources':
        data['data'] = catalogue['resources']['data']
    else:
        data['data'] = catalogue

    return HttpResponse(json.dumps(data, indent=2), content_type='application/json')
Ejemplo n.º 24
0
def reports_json(request, certname=None):
    """Return one 25-row page of puppet reports for ``certname`` as JSON.

    Each report is enriched with its event counts (successes / noops /
    failures / skips) from the PuppetDB ``event-counts`` endpoint; a report
    with no matching event summary gets zeroed counts.

    GET params:
        page -- 1-based page number; persisted in ``request.session``.
    """
    source_url, source_certs, source_verify = get_server(request)
    context = {}
    # The current page number lives in the session so it survives reloads.
    if request.GET.get('page', False):
        if request.session['report_page'] != int(request.GET.get('page', 1)):
            request.session['report_page'] = int(request.GET.get('page', 1))
        if request.session['report_page'] <= 0:
            request.session['report_page'] = 1
    else:
        if 'report_page' not in request.session:
            request.session['report_page'] = 1
    if request.session['report_page'] <= 0:
        offset = 0
    else:
        offset = (25 * request.session['report_page']) - 25
    reports_params = {
        'query':
            {
                1: '["=","certname","' + certname + '"]'
            },
        'order_by':
            {
                'order_field':
                    {
                        'field': 'start_time',
                        'order': 'desc',
                    },
            },
        'limit': 25,
        'include_total': 'true',
        'offset': offset,
    }
    reports_list, headers = puppetdb.api_get(
        api_url=source_url,
        cert=source_certs,
        verify=source_verify,
        path='/reports',
        api_version='v4',
        params=puppetdb.mk_puppetdb_query(
            reports_params, request),
    )
    # Total pages = ceil(X-Records / 25); the header value is a string.
    # (-(-a // b) is integer ceiling division.)
    xrecords = int(headers['X-Records'])
    num_pages = -(-xrecords // 25)

    def _report_entry(report, event=None):
        # Flatten one report (plus its optional event-count row) into the
        # dict shape the frontend expects.
        start = json_to_datetime(report['start_time'])
        end = json_to_datetime(report['end_time'])
        return {
            'hash': report['hash'],
            'certname': report['certname'],
            'environment': report['environment'],
            'start_time': filters.date(localtime(start), 'Y-m-d H:i:s'),
            'end_time': filters.date(localtime(end), 'Y-m-d H:i:s'),
            'events_successes': event['successes'] if event else 0,
            'events_noops': event['noops'] if event else 0,
            'events_failures': event['failures'] if event else 0,
            'events_skipped': event['skips'] if event else 0,
            'report_status': report['status'],
            'config_version': report['configuration_version'],
            'run_duration': "{0:.0f}".format((end - start).total_seconds()),
        }

    report_status = []
    for report in reports_list:
        events_params = {
            'query':
                {
                    1: '["=","report","' + report['hash'] + '"]'
                },
            'summarize_by': 'certname',
        }
        # BUG FIX: this call previously omitted cert=/verify=, unlike every
        # other api_get in this view, which breaks TLS-authenticated sources.
        eventcount_list = puppetdb.api_get(
            path='event-counts',
            api_url=source_url,
            cert=source_certs,
            verify=source_verify,
            api_version='v4',
            params=puppetdb.mk_puppetdb_query(events_params, request),
        )
        # Find the event-count row that belongs to this report's node.
        matched_event = None
        for event in eventcount_list:
            if event['subject']['title'] == report['certname']:
                matched_event = event
                break
        report_status.append(_report_entry(report, matched_event))

    context['certname'] = certname
    context['reports_list'] = report_status
    context['curr_page'] = request.session['report_page']
    context['tot_pages'] = "{:.0f}".format(num_pages)
    return HttpResponse(json.dumps(context), content_type="application/json")
Ejemplo n.º 25
0
def detailed_events(request, hashid=None):
    """Render the per-report events page for report ``hashid``.

    Fetches the report's event list (timestamp ascending) from PuppetDB and
    derives the ten slowest events, where an event's duration is the gap
    between its timestamp and the next event's (the last event is measured
    against the run's end time).
    """
    context = {'timezones': pytz.common_timezones,
               'SOURCES': AVAILABLE_SOURCES}
    if request.method == 'GET':
        if 'source' in request.GET:
            source = request.GET.get('source')
            set_server(request, source)
    if request.method == 'POST':
        # Timezone switcher posts back here; remember it and redirect back.
        request.session['django_timezone'] = request.POST['timezone']
        return redirect(request.POST['return_url'])

    source_url, source_certs, source_verify = get_server(request)
    report_timestamp = request.GET.get('report_timestamp')
    events_params = {
        'query':
            {
                1: '["=","report","' + hashid + '"]'
            },
        'order_by':
            {
                'order_field':
                    {
                        'field': 'timestamp',
                        'order': 'asc',
                    },
                'query_field': {'field': 'certname'},
            },
    }
    events_list = puppetdb.api_get(
        api_url=source_url,
        cert=source_certs,
        verify=source_verify,
        path='/events',
        api_version='v4',
        # CONSISTENCY FIX: pass request like every other mk_puppetdb_query
        # call in this module, so per-session query settings are honoured.
        params=puppetdb.mk_puppetdb_query(events_params, request),
    )
    environment = ''
    certname = ''
    event_execution_times = []
    sorted_events = None
    last_event_time = None
    last_event_title = None
    run_end_time = None

    if len(events_list) != 0:
        single_event = events_list[0]
        environment = single_event['environment']
        certname = single_event['certname']
        for event in events_list:
            event_title = event['resource_title']
            event_start_time = json_to_datetime(event['timestamp'])
            if last_event_time is None and last_event_title is None:
                # First event: just remember it and grab the run's end time.
                last_event_time = event_start_time
                last_event_title = event_title
                run_end_time = json_to_datetime(event['run_end_time'])
                continue
            # Duration of the previous event = gap to this event's start.
            event_exec_time = (event_start_time - last_event_time).total_seconds()
            event_execution_times.append((last_event_title, event_exec_time))
            last_event_time = event_start_time
            last_event_title = event_title
        # BUG FIX: the final event's duration runs from its start to the end
        # of the run; the original subtraction was reversed and yielded a
        # negative value, so the last event could never reach the top 10.
        event_exec_time = (run_end_time - last_event_time).total_seconds()
        event_execution_times.append([last_event_title, event_exec_time])
        # Ten slowest events, longest first.
        sorted_events = sorted(event_execution_times, reverse=True,
                               key=lambda field: field[1])[:10]
    else:
        events_list = False
    context['certname'] = certname
    context['report_timestamp'] = report_timestamp
    context['hashid'] = hashid
    context['events_list'] = events_list
    context['event_durations'] = sorted_events
    context['environment'] = environment

    return render(request, 'pano/detailed_events.html', context)
Ejemplo n.º 26
0
def catalogue_compare_json(request, certname1=None, certname2=None):
    """Compare two node catalogues ('edges' or 'resources') and return JSON.

    Each side comes either from a saved DB copy (when the matching
    ``certnameN_hash`` GET param is supplied) or live from PuppetDB.
    The response lists added, deleted and changed entries, keyed identically
    on both sides so DictDiffer can align them.
    """
    source_url, source_certs, source_verify = get_server(request)
    show = request.GET.get('show', 'edges')
    data = dict()
    certname1_hash = request.GET.get('certname1_hash', False)
    certname2_hash = request.GET.get('certname2_hash', False)

    cata_params = dict()

    def _load_catalogue(certname, cat_hash):
        # Return (catalogue_data, error_response); exactly one is None.
        if cat_hash:
            try:
                saved = SavedCatalogs.objects.get(hostname=certname,
                                                  catalogue_id=cat_hash)
                return json.loads(saved.catalogue)[show]['data'], None
            except SavedCatalogs.DoesNotExist:
                err = {
                    'error': 'Catalogue hash not found in DB.',
                    'hash_not_found': cat_hash,
                    'certname': certname,
                }
                return None, HttpResponseBadRequest(
                    json.dumps(err, indent=2),
                    content_type="application/json")
        result = puppetdb.api_get(
            path='/catalogs/%s' % certname,
            api_url=source_url,
            api_version='v4',
            params=puppetdb.mk_puppetdb_query(cata_params, request),
        )
        return result[show]['data'], None

    certname1_data, error = _load_catalogue(certname1, certname1_hash)
    if error:
        return error
    certname2_data, error = _load_catalogue(certname2, certname2_hash)
    if error:
        return error

    def _index(entries):
        # Key entries for diffing. The certname field differs per node and
        # would make every entry look changed, so strip it first.
        # (Replaces the original bare `except:` around .pop with a default.)
        indexed = dict()
        if show == "edges":
            for edge in entries:
                edge.pop('certname', None)
                key = '%s-%s-%s-%s-%s' % (edge['source_type'],
                                          edge['source_title'],
                                          edge['relationship'],
                                          edge['target_type'],
                                          edge['target_title'])
                indexed[key] = edge
        elif show == "resources":
            for resource in entries:
                resource.pop('certname', None)
                indexed[resource['title']] = resource
        return indexed

    node_for = _index(certname1_data)
    node_agn = _index(certname2_data)

    diff = DictDiffer(node_agn, node_for)

    # Entries only in certname2's catalogue.
    new_entries = [node_agn[key] for key in diff.added()]
    # Entries only in certname1's catalogue.
    rem_entries = [node_for[key] for key in diff.removed()]
    # Entries present on both sides but with differing contents.
    cha_entries = [{'from': node_for[key], 'against': node_agn[key]}
                   for key in diff.changed()]

    output = {
        'added_entries': new_entries,
        'deleted_entries': rem_entries,
        'changed_entries': cha_entries
    }

    return HttpResponse(json.dumps(output, indent=2), content_type="application/json")
Ejemplo n.º 27
0
def get_file(request,
             certname,
             environment,
             rtitle,
             rtype,
             md5sum_from=None,
             md5sum_to=None,
             diff=False,
             file_status='from'):
    """Retrieve (or diff) the content of a puppet-managed file.

    ``file_status`` selects the mode:
      * 'from' -- return the old version (``md5sum_from``) from the
        Puppet filebucket.
      * 'to'   -- return the new version (``md5sum_to``); falls back to the
        PuppetDB resource's ``content`` parameter and then the
        Puppetmaster fileserver (for ``puppet://`` sources) when the
        filebucket does not have it.
      * 'both' -- with ``diff`` truthy, return a unified diff between the
        two md5 versions as a list of lines.

    Returns the file text (prefixed with a note saying where it was found),
    a diff line list, or False on any lookup failure or when filebucket
    support is disabled.
    """
    puppetdb_source, puppetdb_certs, puppetdb_verify = get_server(
        request=request)
    # NOTE(review): the 'filebucket'/'fileserver' variants of get_server
    # return a 4-tuple (extra *_show flag) while the default returns 3 --
    # confirm against get_server's definition.
    filebucket_source, filebucket_certs, filebucket_verify, filebucket_show = get_server(
        request=request, type='filebucket')
    fileserver_source, fileserver_certs, fileserver_verify, fileserver_show = get_server(
        request=request, type='fileserver')

    # If Clientbucket is enabled continue else return False

    def fetch_filebucket(url, method):
        # GET or HEAD a file from the Puppet filebucket; returns the response
        # body text, or False on an unsupported method / non-200 status.
        headers = {
            # NOTE(review): bare 's' Accept header -- presumably what the
            # Puppet filebucket HTTP API expects for raw content; verify
            # against the Puppet HTTP API documentation.
            'Accept': 's',
        }
        methods = {
            'get': requests.get,
            'head': requests.head,
        }
        if method not in methods:
            print('No can has method: %s' % method)
            return False
        resp = methods[method](url,
                               headers=headers,
                               verify=filebucket_verify,
                               cert=filebucket_certs)
        if resp.status_code != 200:
            return False
        else:
            return resp.text

    def fetch_fileserver(url, method):
        # GET a file from the Puppetmaster fileserver; returns the response
        # body text, or False on an unsupported method / non-200 status.
        methods = {
            'get': requests.get,
        }

        if method not in methods:
            print('No can has method: %s' % method)
            return False
        resp = methods[method](url,
                               verify=fileserver_verify,
                               cert=fileserver_certs)
        if resp.status_code != 200:
            return False
        else:
            return resp.text

    def get_resource(certname, rtype, rtitle):
        # Look up a single resource (certname + type + title) in PuppetDB.
        # Returns the API result list, or False when nothing matched.
        resource_params = {
            'query': {
                'operator': 'and',
                1: '["=", "certname", "' + certname + '"]',
                2: '["=", "type", "' + rtype + '"]',
                3: '["=", "title", "' + rtitle + '"]'
            },
        }
        data = pdb_api_get(api_url=puppetdb_source,
                           path='resources',
                           verify=puppetdb_verify,
                           cert=puppetdb_certs,
                           params=mk_puppetdb_query(resource_params, request))
        if not data:
            return False
        else:
            return data

    # Bail out when the filebucket UI is disabled or no fileserver is set.
    if not filebucket_show or not fileserver_source:
        return False
    if file_status == 'both':
        # Diff mode: we need both md5 sums plus the resource identifiers.
        if md5sum_to and md5sum_from and certname and rtitle and rtype:
            if diff:
                # is the hash from puppetdb resource same as md5sum_to
                hash_matches = False

                # Puppet reports prefix checksums with '{md5}'; strip it.
                md5sum_from = md5sum_from.replace('{md5}', '')
                md5sum_to = md5sum_to.replace('{md5}', '')

                from_url = filebucket_source + environment + '/file_bucket_file/md5/' + md5sum_from
                to_url = filebucket_source + environment + '/file_bucket_file/md5/' + md5sum_to

                # HEAD first to check existence, then GET the content.
                if fetch_filebucket(from_url, 'head') is not False:
                    resource_from = fetch_filebucket(from_url, 'get')
                else:
                    # Could not find old MD5 in Filebucket
                    return False
                if fetch_filebucket(to_url, 'head') is not False:
                    resource_to = fetch_filebucket(to_url, 'get')
                # Try puppetdb resources if not found in filebucket.
                else:
                    resource_to = get_resource(certname=certname,
                                               rtype=rtype,
                                               rtitle=rtitle)
                    if resource_to is False:
                        # Could not find new file in Filebucket or as a PuppetDB Resource
                        return False
                    else:
                        resource_to = resource_to[0]
                    if 'content' in resource_to['parameters']:
                        resource_to = resource_to['parameters']['content']
                        hash_of_resource = get_hash(resource_to)
                        if hash_of_resource == md5sum_to:
                            # file from resource matches filebucket md5 hash
                            hash_matches = True
                    # Solve the viewing of source files by retrieving it from Puppetmaster
                    elif 'source' in resource_to[
                            'parameters'] and fileserver_show is True:
                        source_path = resource_to['parameters']['source']
                        if source_path.startswith('puppet://'):
                            # extract the path for the file
                            source_path = source_path.split(
                                '/'
                            )  # ['puppet:', '', '', 'files', 'autofs', 'auto.home']
                            source_path = '/'.join(
                                source_path[3:]
                            )  # Skip first 3 entries since they are not needed
                            # https://puppetmaster.example.com:8140/production/file_content/files/autofs/auto.home
                            url = fileserver_source + environment + '/file_content/' + source_path
                            # NOTE(review): fetch_fileserver may return False
                            # here, and a non-puppet:// source leaves
                            # resource_to as a dict -- either would crash the
                            # .split('\n') below; confirm and guard upstream.
                            resource_to = fetch_fileserver(url, 'get')
                    else:
                        return False
                # now that we have come this far, we have both files.
                # Lets differentiate the shit out of these files.
                from_split_lines = resource_from.split('\n')
                to_split_lines = resource_to.split('\n')
                diff = difflib.unified_diff(from_split_lines, to_split_lines)
                # Join and re-split so the output is a flat list of lines.
                diff = ('\n'.join(list(diff))).split('\n')
                return diff
            else:
                return False
        else:
            return False

    # Single-file mode: pick the md5 matching the requested side.
    if file_status == 'from' and md5sum_from:
        md5sum = md5sum_from.replace('{md5}', '')
    elif file_status == 'to' and md5sum_to:
        md5sum = md5sum_to.replace('{md5}', '')
    else:
        return False
    # Creates headers and url from the data we got

    url_clientbucket = filebucket_source + environment + '/file_bucket_file/md5/' + md5sum
    if fetch_filebucket(url_clientbucket, 'head') is False:
        # Check if theres a resource available for the latest file available
        if file_status == 'to':
            resp_pdb = get_resource(certname=certname,
                                    rtype=rtype,
                                    rtitle=rtitle)
            # we got the data lets give the user the good news.
            if resp_pdb:
                resource_data = resp_pdb[0]
                if 'content' in resource_data['parameters']:
                    prepend_text = 'This file with MD5 %s was found in PuppetDB Resources.\n\n' % (
                        get_hash(resource_data['parameters']['content']))
                    return prepend_text + resource_data['parameters']['content']
                elif 'source' in resource_data[
                        'parameters'] and fileserver_show is True:
                    source_path = resource_data['parameters']['source']
                    if source_path.startswith('puppet://'):
                        # extract the path for the file
                        source_path = source_path.split(
                            '/'
                        )  # ['puppet:', '', '', 'files', 'autofs', 'auto.home']
                        source_path = '/'.join(
                            source_path[3:]
                        )  # Skip first 3 entries since they are not needed
                        # https://puppetmaster.example.com:8140/production/file_content/files/autofs/auto.home
                        url = fileserver_source + environment + '/file_content/' + source_path
                        source_content = fetch_fileserver(url, 'get')
                        prepend_text = 'This file with MD5 %s was retrieved from the PuppetMaster Fileserver.\n\n' % (
                            get_hash(source_content))
                        return prepend_text + source_content
                    else:
                        return False
                else:
                    return False
            # the file can't be found as a resource and or fileserver support not enabled
            else:
                return False
        # We probably don't want to search for resources if its the old file.
        else:
            return False
    else:
        filebucket_results = fetch_filebucket(url_clientbucket, 'get')
        prepend_text = 'This file with MD5 %s was found in Filebucket.\n\n' % (
            md5sum)
        return prepend_text + filebucket_results