def archive_collections(CONFIG, age=90):
    '''
    Copy collection rows older than `age` days into collection_archive,
    then delete them from the live collection table.

    CONFIG may be a pre-parsed configuration dict or a path (str) to a
    configuration file; anything else raises TypeError.
    '''

    logger = logging.getLogger("collection_archive")

    # Normalize configuration: dict is used as-is, str is treated as a path.
    if isinstance(CONFIG, dict):
        config_items = CONFIG
    elif isinstance(CONFIG, str):
        config_items = manoward.get_manoward(explicit_config=CONFIG)
    else:
        raise TypeError("No Configuration Given.")

    db_conn = manoward.get_conn(config_items,
                                prefix="store_",
                                tojq=".database",
                                ac_def=True)

    cur = db_conn.cursor()

    # Single timestamp so the REPLACE and the DELETE use the same cutoff.
    archive_ts = int(time.time())

    logger.debug("Archive ts: {}".format(archive_ts))

    populate_archive_sql = '''REPLACE INTO collection_archive SELECT * FROM collection WHERE last_update < FROM_UNIXTIME(%s) - interval %s DAY ; '''

    remove_overachieving_sql = '''DELETE FROM collection WHERE last_update < FROM_UNIXTIME(%s) - interval %s DAY ; '''

    archive_args = [archive_ts, age]

    copy_action = manoward.run_query(cur,
                                     populate_archive_sql,
                                     args=archive_args,
                                     require_results=False,
                                     do_abort=False)

    if copy_action["has_error"] is True:
        # Never delete rows that were not archived successfully.
        logger.error("{}Had an Error When Running Archive. Ignoring Delete{}".format(
            Fore.RED, Style.RESET_ALL))
    else:
        logger.info("Archive Worked Swimmingly. Let's Go Ahead and Delete.")

        delete_action = manoward.run_query(cur,
                                           remove_overachieving_sql,
                                           args=archive_args,
                                           require_results=False,
                                           do_abort=False)

        if delete_action["has_error"] is True:
            logger.error("{}Error when deleting the Excess.{}".format(
                Fore.RED, Style.RESET_ALL))
        else:
            logger.info("{}Collection Table Archived {}".format(
                Fore.GREEN, Style.RESET_ALL))
def api2_collected_types():
    '''
    Return the distinct collection types updated within the last two days.

    Emits a jsonapi-style payload (meta/data/links). Each data item's
    attributes are the raw row dict from the query; id is always None
    because a type name has no numeric identity.
    '''

    # Fix: removed unused locals (error_dict) that were never populated or
    # returned.
    meta_dict = dict()
    request_data = list()
    links_dict = dict()

    meta_dict["version"] = 2
    meta_dict["name"] = "Jellyfish API Version 2 : Collected Types"
    meta_dict["status"] = "In Progress"

    links_dict["children"] = dict()
    links_dict["parent"] = "{}{}/collected".format(
        g.config_items["v2api"]["preroot"], g.config_items["v2api"]["root"])
    links_dict["self"] = "{}{}/collected/types".format(
        g.config_items["v2api"]["preroot"], g.config_items["v2api"]["root"])

    requesttype = "collection_type"

    # Have a deterministic query so that query caching can do it's job
    collected_type_args = [g.twoDayTimestamp]
    collected_type_query = "select distinct(collection_type) from collection where last_update >= FROM_UNIXTIME(%s)"

    results = manoward.run_query(g.cur,
                                 collected_type_query,
                                 args=collected_type_args,
                                 one=False,
                                 do_abort=True,
                                 require_results=False)

    for this_ctype in results.get("data", list()):
        this_results = dict()
        this_results["type"] = requesttype
        this_results["id"] = None
        this_results["attributes"] = this_ctype

        # Now pop this onto request_data
        request_data.append(this_results)

    return jsonify(meta=meta_dict, data=request_data, links=links_dict)
def api2_collected_values(ctype="none"):
    '''
    Return the actual collection rows for one collection type, joined to
    host data, honoring the standard host/collection filter arguments.
    '''

    args_def = {"ctype": {"req_type": str,
                          "default": ctype,
                          "required": True,
                          "sql_param": True,
                          "sql_clause": "collection.collection_type = %s",
                          "qdeparse": False}}

    args = manoward.process_args(args_def,
                                 request.args,
                                 coll_lulimit=g.twoDayTimestamp,
                                 include_hosts_sql=True,
                                 include_coll_sql=True,
                                 include_exact=True)

    meta_dict = dict()
    request_data = list()
    links_dict = dict()

    meta_dict["version"] = 2
    meta_dict["name"] = "Jellyfish API Version 2 : Collected values for type {}".format(
        args["ctype"])
    meta_dict["status"] = "In Progress"

    # TODO
    links_dict["children"] = {}
    links_dict["parent"] = "{}{}/collected/subtypes/".format(
        g.config_items["v2api"]["preroot"],
        g.config_items["v2api"]["root"])
    links_dict["self"] = "{}{}/collected/values/{}?{}".format(
        g.config_items["v2api"]["preroot"],
        g.config_items["v2api"]["root"],
        args["ctype"],
        args["qdeparsed_string"])

    requesttype = "collected_value"

    # WHERE clause is assembled by process_args; placeholders line up with
    # args_clause_args.
    collected_values_query = '''SELECT collection_id, hosts.host_id, hosts.hostname, hosts.hoststatus, hosts.pop, hosts.srvtype, collection_type, collection_subtype, collection_value, UNIX_TIMESTAMP(collection.initial_update) as initial_update, UNIX_TIMESTAMP(collection.last_update) as last_update FROM collection JOIN hosts ON fk_host_id = hosts.host_id where {}'''.format(" and ".join(args["args_clause"]))

    results = manoward.run_query(g.cur,
                                 collected_values_query,
                                 args=args["args_clause_args"],
                                 one=False,
                                 do_abort=True,
                                 require_results=False)

    for row in results.get("data", list()):
        # Now pop this onto request_data
        request_data.append({"type": requesttype,
                             "id": row["collection_id"],
                             "attributes": row})

    return jsonify(meta=meta_dict, data=request_data, links=links_dict)
def api2_auditresults(audit_id=0):
    '''
    Return the Audit Results for Particular Audit filtered by A series of
    Items.

    audit_id must be a positive integer; the standard host and
    audit-result filter arguments are honored via process_args.
    '''

    # Fix: removed the duplicated "args_def = args_def = {...}" assignment.
    args_def = {
        "audit_id": {
            "req_type": int,
            "default": audit_id,
            "required": True,
            "sql_param": True,
            "sql_clause": "fk_audits_id = %s",
            "positive": True
        },
    }

    args = manoward.process_args(args_def,
                                 request.args,
                                 include_hosts_sql=True,
                                 include_ar_sql=True,
                                 include_exact=True,
                                 abh_limit=g.twoDayTimestamp)

    meta_dict = dict()
    request_data = list()
    links_dict = dict()

    requesttype = "auditresults"

    meta_dict["version"] = 2
    meta_dict["name"] = "Jellyfish API Version 2 Audit Results for Audit ID {}".format(
        args["audit_id"])
    meta_dict["status"] = "In Progress"

    links_dict["parent"] = "{}{}".format(g.config_items["v2api"]["preroot"],
                                         g.config_items["v2api"]["root"])
    links_dict["self"] = "{}{}/auditresults/{}?{}".format(
        g.config_items["v2api"]["preroot"],
        g.config_items["v2api"]["root"],
        args["audit_id"],
        args["qdeparsed_string"])

    audit_result_query = '''select audit_result_id, audits.audit_name, fk_host_id, hosts.hostname, fk_audits_id, UNIX_TIMESTAMP(initial_audit) as 'initial_audit', UNIX_TIMESTAMP(last_audit) as 'last_audit', bucket, audit_result, audit_result_text, hosts.pop, hosts.srvtype, hosts.hoststatus from audits_by_host join hosts on fk_host_id = host_id join audits on fk_audits_id = audit_id where {}'''.format(" and ".join(
        args["args_clause"]))

    results = manoward.run_query(g.cur,
                                 audit_result_query,
                                 args=args["args_clause_args"],
                                 one=False,
                                 do_abort=True,
                                 require_results=False)

    for this_a_result in results.get("data", list()):
        this_results = dict()
        this_results["type"] = requesttype
        this_results["id"] = this_a_result["fk_host_id"]
        this_results["attributes"] = this_a_result
        this_results["auditinfo"] = this_a_result["audit_result_id"]
        this_results["relationships"] = dict()
        this_results["relationships"]["hostinfo"] = "{}{}/hostcollections/{}".format(
            g.config_items["v2api"]["preroot"],
            g.config_items["v2api"]["root"],
            this_a_result["fk_host_id"])

        # Now pop this onto request_data
        request_data.append(this_results)

    return jsonify(meta=meta_dict, data=request_data, links=links_dict)
def api2_auditinfo_buckets(audit_id=0):
    '''
    Loads the Audit Definition from Disk and Reads in the Arbitrarily
    Complex Audit Filters and Comparisons to Provide the Needed Data
    '''

    requesttype = "Audit Buckets"

    meta_info = dict()
    meta_info["version"] = 2
    meta_info["name"] = "Audit Bucket Information."
    meta_info["state"] = "In Progress"
    meta_info["children"] = dict()

    links_info = dict()
    links_info["self"] = "{}{}/auditinfo/{}/buckets".format(
        g.config_items["v2api"]["preroot"],
        g.config_items["v2api"]["root"],
        audit_id)
    links_info["parent"] = "{}{}/auditinfo/{}/".format(
        g.config_items["v2api"]["preroot"],
        g.config_items["v2api"]["root"],
        audit_id)
    links_info["children"] = dict()

    request_data = list()

    # Reject non-positive ids before touching the database.
    if audit_id <= 0:
        g.logger.error("Zero or Negative Bucket ID Given")
        abort(404)

    select_query = '''select filename, audit_name from audits where audit_id = %s order by audit_priority desc, audit_id desc '''

    run_result = manoward.run_query(g.cur,
                                    select_query,
                                    args=[audit_id],
                                    one=True,
                                    do_abort=True,
                                    require_results=True)

    requested_audit = run_result.get("data", dict())

    audit = dict()
    audit["id"] = audit_id
    audit["type"] = requesttype
    audit["relationships"] = dict()
    audit["relationships"]["auditinfo"] = "{}{}/auditinfo/{}/".format(
        g.config_items["v2api"]["preroot"],
        g.config_items["v2api"]["root"],
        audit_id)
    audit["attributes"] = dict()

    #
    # Now Load File
    #
    try:
        this_audit_config = audittools.load_auditfile(
            requested_audit["filename"])
    except Exception as audit_error:
        g.logger.error("Unable to Parse Data from Auditfile : {}".format(
            requested_audit["filename"]))
        g.logger.debug(audit_error)
        abort(500)

    # The file may define several audits; the one named in the DB row
    # must be present.
    if requested_audit["audit_name"] not in this_audit_config.keys():
        g.logger.error("Unable to Find Audit Described in File.")
        g.logger.debug("Available Audits in file {} : {}".format(
            requested_audit["filename"], this_audit_config.keys()))
        abort(404)

    this_audit = this_audit_config[requested_audit["audit_name"]]
    audit["attributes"]["filters"] = this_audit["filters"]
    audit["attributes"]["comparisons"] = this_audit["comparisons"]

    request_data.append(audit)

    return jsonify(meta=meta_info, data=request_data, links=links_info)
def api2_auditresults_range(backdays=0, audit_id=0):
    '''
    Get the Results going Back backdays number of days

    Runs one count-per-day query (executemany) against midnight-aligned
    timestamps for the last `backdays` days.
    '''

    # Fix: removed the duplicated "args_def = args_def = {...}" assignment.
    args_def = {
        "audit_id": {
            "req_type": int,
            "default": audit_id,
            "required": True,
            "sql_param": True,
            "sql_clause": "fk_audits_id = %s",
            "positive": True
        },
        "backdays": {
            "req_type": int,
            "default": backdays,
            "required": True,
            "sql_param": False,
            "positive": True
        },
    }

    args = manoward.process_args(args_def,
                                 request.args,
                                 include_hosts_sql=True,
                                 include_ar_sql=True,
                                 include_exact=True)

    meta_dict = dict()
    request_data = list()
    links_dict = dict()

    meta_dict["version"] = 2
    meta_dict["name"] = "Jellyfish API Version 2 Counts Results over a range for a particular type {} for {} days".format(
        args["audit_id"], args["backdays"])
    meta_dict["status"] = "In Progress"

    requesttype = "auditresults_range"

    links_dict["parent"] = "{}{}/auditresults/{}?{}".format(
        g.config_items["v2api"]["preroot"],
        g.config_items["v2api"]["root"],
        args["audit_id"],
        args["qdeparsed_string"])
    links_dict["self"] = "{}{}/auditresults/{}/range/{}?{}".format(
        g.config_items["v2api"]["preroot"],
        g.config_items["v2api"]["root"],
        args["audit_id"],
        args["backdays"],
        args["qdeparsed_string"])

    # Generate a List of Timestamps to Cycle Through
    # Each entry repeats the timestamp because the query consumes it in
    # three placeholder positions (label, initial_audit, last_audit).
    check_timestamps = list()

    for x in range(args["backdays"], 0, -1):
        this_timestamp_to_add = g.MIDNIGHT - (x * 86400)
        this_date_object = date.fromtimestamp(this_timestamp_to_add)
        this_date_string = this_date_object.strftime('%m-%d')

        check_timestamps.append([
            this_date_string, this_timestamp_to_add, this_timestamp_to_add,
            this_timestamp_to_add
        ])

    audit_result_query = '''select %s as date, count(*) as hosts, '%s' as timestamp from ( select * from audits_by_host join hosts on fk_host_id = host_id join audits on fk_audits_id = audit_id where {} and initial_audit <= FROM_UNIXTIME(%s) and last_audit >= FROM_UNIXTIME(%s) group by fk_host_id ) as this_hosts '''.format(" and ".join(args["args_clause"]))

    # Build my Arguments List (one parameter row per day for many=True)
    query_args = [[
        timestamp[0], timestamp[1], *args["args_clause_args"], timestamp[2],
        timestamp[3]
    ] for timestamp in check_timestamps]

    run_result = manoward.run_query(g.cur,
                                    audit_result_query,
                                    args=query_args,
                                    one=False,
                                    do_abort=True,
                                    require_results=True,
                                    many=True)

    for this_day_result in run_result.get("data", list()):
        this_results = dict()
        this_results["type"] = requesttype
        this_results["id"] = this_day_result["timestamp"]
        this_results["attributes"] = this_day_result
        this_results["relationships"] = dict()
        this_results["relationships"]["auditinfo"] = "{}{}/auditinfo/{}".format(
            g.config_items["v2api"]["preroot"],
            g.config_items["v2api"]["root"],
            args["audit_id"])
        this_results["relationships"]["auditresults_timestamp"] = "{}{}/auditresults/{}/{}?{}".format(
            g.config_items["v2api"]["preroot"],
            g.config_items["v2api"]["root"],
            args["audit_id"],
            this_day_result["timestamp"],
            args.get("qdeparsed_string", ""))

        # Now pop this onto request_data
        request_data.append(this_results)

    return jsonify(meta=meta_dict, data=request_data, links=links_dict)
def api2_collected_types(ctype="none"):
    '''
    Return the Available Subtypes for a particular type

    NOTE(review): this def shares its name with the no-argument
    api2_collected_types defined earlier in this file; presumably each is
    bound to a distinct Flask route — confirm the decorators upstream.
    '''

    args_def = {
        "ctype": {
            "req_type": str,
            "default": ctype,
            "required": True,
            "sql_param": True,
            "sql_clause": "collection_type = %s",
            "qdeparse": False
        }
    }

    args = manoward.process_args(args_def,
                                 request.args,
                                 lulimit=g.twoDayTimestamp)

    meta_dict = dict()
    request_data = list()
    links_dict = dict()

    meta_dict["version"] = 2
    meta_dict["name"] = "Jellyfish API Version 2 : Collected Subtypes for type {}".format(
        args["ctype"])
    meta_dict["status"] = "In Progress"

    links_dict["children"] = dict()
    links_dict["parent"] = "{}{}/collected/types".format(
        g.config_items["v2api"]["preroot"], g.config_items["v2api"]["root"])
    links_dict["self"] = "{}{}/collected/subtypes/{}".format(
        g.config_items["v2api"]["preroot"],
        g.config_items["v2api"]["root"],
        args["ctype"])

    requesttype = "collection_subtype"

    # Fix: removed dead local collected_subtypes_filtered_query_args; the
    # query below is parameterized exclusively from args["args_clause_args"].
    collected_subtype_query = '''select distinct(collection_subtype) as subtype_name from collection where {}'''.format(" and ".join(
        args["args_clause"]))

    results = manoward.run_query(g.cur,
                                 collected_subtype_query,
                                 args=args["args_clause_args"],
                                 one=False,
                                 do_abort=True,
                                 require_results=False)

    for this_subtype in results.get("data", list()):
        this_results = dict()
        this_results["type"] = requesttype
        this_results["id"] = this_subtype["subtype_name"]
        this_results["attributes"] = this_subtype
        this_results["relationships"] = {
            "values":
            "{}{}/collected/values/{}/{}".format(
                g.config_items["v2api"]["preroot"],
                g.config_items["v2api"]["root"],
                args["ctype"],
                this_subtype["subtype_name"])
        }

        request_data.append(this_results)

    return jsonify(meta=meta_dict, data=request_data, links=links_dict)
def api2_auditresults_timestamp(request_timestamp=0, audit_id=0):
    '''
    Return the Audit Results as they appeared at a particular time.

    A host matches when the requested timestamp falls inside the
    [initial_audit, last_audit] window of its audit row.
    '''

    # Fix: removed the duplicated "args_def = args_def = {...}" assignment.
    args_def = {"audit_id": {"req_type": int,
                             "default": audit_id,
                             "required": True,
                             "sql_param": True,
                             "sql_clause": "fk_audits_id = %s",
                             "positive": True},
                "request_timestamp": {"req_type": int,
                                      "default": request_timestamp,
                                      "required": True,
                                      "sql_param": True,
                                      # Same value feeds both placeholders.
                                      "sql_param_count": 2,
                                      "sql_clause": "initial_audit <= FROM_UNIXTIME( %s ) and last_audit >= FROM_UNIXTIME( %s )",
                                      "positive": True}
                }

    args = manoward.process_args(args_def,
                                 request.args,
                                 include_hosts_sql=True,
                                 include_ar_sql=True,
                                 include_exact=True)

    meta_dict = dict()
    request_data = list()
    links_dict = dict()

    meta_dict["version"] = 2
    meta_dict["name"] = "Jellyfish API Version 2 Audit Results for Audit ID " + \
        str(audit_id) + " at time " + str(request_timestamp)
    meta_dict["status"] = "In Progress"

    links_dict["parent"] = "{}{}/auditresults?".format(
        g.config_items["v2api"]["preroot"],
        g.config_items["v2api"]["root"])
    links_dict["self"] = "{}{}/auditresults/{}/{}?{}".format(
        g.config_items["v2api"]["preroot"],
        g.config_items["v2api"]["root"],
        args["audit_id"],
        args["request_timestamp"],
        args["qdeparsed_string"])

    requesttype = "auditresults_timestamp"

    audit_result_ts_query = '''select audit_result_id, audits.audit_name, fk_host_id, hosts.hostname, fk_audits_id, UNIX_TIMESTAMP(initial_audit) as 'initial_audit', UNIX_TIMESTAMP(last_audit) as 'last_audit', bucket, audit_result, audit_result_text, hosts.pop, hosts.srvtype from audits_by_host join hosts on fk_host_id = host_id join audits on fk_audits_id = audit_id where {} group by fk_host_id '''.format(" and ".join(args["args_clause"]))

    results = manoward.run_query(g.cur,
                                 audit_result_ts_query,
                                 args=args["args_clause_args"],
                                 one=False,
                                 do_abort=True,
                                 require_results=False)

    for this_result in results.get("data", list()):
        this_results = dict()
        this_results["type"] = requesttype
        this_results["id"] = this_result["fk_host_id"]
        this_results["attributes"] = this_result
        this_results["auditinfo"] = this_result["audit_result_id"]
        this_results["relationships"] = dict()

        # Now pop this onto request_data
        request_data.append(this_results)

    return jsonify(meta=meta_dict, data=request_data, links=links_dict)
def api2_dashboard(cust_dash_id=None):
    '''
    Dashboard query: latest pass/fail/exempt counts per audit, optionally
    restricted to the audits on one custom dashboard and/or filtered to
    only fully-passing or partly-failing audits.

    Fixes applied:
    - The filter condition tested pass_audits twice instead of
      pass_audits AND fail_audits, which made the pass-only branch
      unreachable (pass_audits == "true" always matched the first test).
    - meta key "custom_dashbaord" corrected to "custom_dashboard" so both
      branches set the same key.
    - "returnd" typo in the fail-only info message.
    - Percentage math guards a zero total_servers (ZeroDivisionError).
    - Removed unused local requesttime.
    '''

    args_def = {
        "pass_audits": {
            "required": True,
            "default": "false",
            "req_type": str,
            "enum": ["true", "false"]
        },
        "fail_audits": {
            "required": True,
            "default": "false",
            "req_type": str,
            "enum": ["true", "false"]
        },
        "cust_dash_id": {
            "required": False,
            "default": cust_dash_id,
            "req_type": int,
            "positive": True
        }
    }

    args = manoward.process_args(args_def, request.args)

    requesttype = "dashboard_query"

    meta_info = dict()
    meta_info["version"] = 2
    meta_info["name"] = "Dashboard Query for Jellyfish2 API Version 2"
    meta_info["state"] = "In Progress"
    meta_info["children"] = dict()

    links_info = dict()
    links_info["self"] = "{}{}/dashboard".format(
        g.config_items["v2api"]["preroot"], g.config_items["v2api"]["root"])
    links_info["parent"] = "{}{}/".format(g.config_items["v2api"]["preroot"],
                                          g.config_items["v2api"]["root"])
    links_info["children"] = dict()

    request_data = list()
    error_dict = dict()

    do_query = True

    # Pick, per audit, the newest audits_by_acoll row within the window.
    dashboard_query_head = ''' SELECT audits.audit_name, audits.audit_id, audits.audit_priority, audit_short_description, audits_by_acoll.acoll_passed, audits_by_acoll.acoll_failed, audits_by_acoll.acoll_exempt FROM (SELECT fk_audits_id, max(acoll_last_audit) AS maxtime FROM audits_by_acoll WHERE acoll_last_audit >= FROM_UNIXTIME(%s) GROUP BY fk_audits_id) AS maxdate JOIN audits_by_acoll ON audits_by_acoll.acoll_last_audit = maxtime AND audits_by_acoll.fk_audits_id = maxdate.fk_audits_id JOIN audits ON audits.audit_id = maxdate.fk_audits_id '''

    dashboard_query_args = [str(g.twoDayTimestamp)]

    # Inject Custom Dashboard Items
    if args["cust_dash_id"] is not None:
        custdashboard_join = ''' JOIN (SELECT fk_audits_id AS dash_audit_id FROM custdashboardmembers WHERE fk_custdashboardid = %s ) AS thisdash ON maxdate.fk_audits_id = thisdash.dash_audit_id '''

        dashboard_query_args.append(args["cust_dash_id"])
        dashboard_query_head = dashboard_query_head + custdashboard_join

        meta_info["cust_dash_id"] = args["cust_dash_id"]
        meta_info["custom_dashboard"] = True

        this_endpoint = g.config_items["v2api"]["preroot"] + g.config_items["v2api"]["root"] + \
            "/custdashboard/list/{}/".format(args["cust_dash_id"])
        links_info["cust_dash_id"] = this_endpoint
    else:
        meta_info["custom_dashboard"] = False

    if (args["pass_audits"] == "false" and args["fail_audits"] == "false") or (
            args["pass_audits"] == "true" and args["fail_audits"] == "true"):
        # Give me Everything
        dashboard_query_mid = " "
    elif args["pass_audits"] == "true":
        # I want only the Audits that have completely passed (Where there are no failures)
        dashboard_query_mid = " where acoll_failed = 0 "
        meta_info["Query Info"] = "Only Passing Audits have been returned (Audits where there are zero failures)."
    elif args["fail_audits"] == "true":
        # I want only the Audits that have failed (Where there are at least one failure)
        dashboard_query_mid = " where acoll_failed > 0 "
        meta_info["Query Info"] = "Only Failing Audits have been returned (Audits where there are more than zero failures)."

    dashboard_query_tail = "order by audits.audit_priority desc, acoll_failed desc"

    dashboard_query = dashboard_query_head + \
        dashboard_query_mid + dashboard_query_tail

    # Select Query
    if do_query is True:
        results = manoward.run_query(g.cur,
                                     dashboard_query,
                                     args=dashboard_query_args,
                                     one=False,
                                     do_abort=True,
                                     require_results=False)

        all_collections = results["data"]
        amount_of_collections = len(results["data"])
    else:
        error_dict["do_query"] = "Query Ignored"
        amount_of_collections = 0

    if amount_of_collections > 0:
        collections_good = True

        api_base = g.config_items["v2api"]["preroot"] + \
            g.config_items["v2api"]["root"]
        ui_base = g.config_items["v2ui"]["preroot"] + \
            g.config_items["v2ui"]["root"]

        # Hydrate the dict with type & ids to be jsonapi compliant
        for row in all_collections:
            this_results = dict()
            this_results["type"] = requesttype
            this_results["id"] = row["audit_id"]
            this_results["attributes"] = row

            total_servers = row["acoll_exempt"] + \
                row["acoll_failed"] + row["acoll_passed"]
            this_results["attributes"]["total_servers"] = total_servers
            this_results["attributes"]["total_pass_fail"] = row["acoll_failed"] + \
                row["acoll_passed"]

            # Guard a zero-host audit so the ratios below cannot divide by
            # zero; all percentages become 0 in that case.
            denominator = total_servers if total_servers > 0 else 1

            for bucket, count_key in (("pass", "acoll_passed"),
                                      ("fail", "acoll_failed"),
                                      ("exempt", "acoll_exempt")):
                ratio = row[count_key] / denominator
                this_results["attributes"]["{}_percent".format(bucket)] = ratio
                this_results["attributes"]["{}_percent_int".format(bucket)] = int(
                    ratio * 100)

            aid = str(row["audit_id"])

            this_results["relationships"] = dict()
            this_results["relationships"]["auditinfo"] = api_base + \
                "/auditinfo/" + aid
            this_results["relationships"]["display_auditinfo"] = ui_base + \
                "/auditinfo/" + aid
            this_results["relationships"]["auditresults"] = {
                "pass": api_base + "/auditresults/" + aid + "?auditResult='pass'",
                "fail": api_base + "/auditresults/" + aid + "?auditResult='fail'",
                "exempt": api_base + "/auditresults/" + aid + "?auditResult='notafflicted'"
            }
            this_results["relationships"]["display_auditresults"] = {
                "pass": ui_base + "/auditresults/" + aid + "?auditResult='pass'",
                "fail": ui_base + "/auditresults/" + aid + "?auditResult='fail'",
                "exempt": ui_base + "/auditresults/" + aid + "?auditResult='notafflicted'"
            }

            # Now pop this onto request_data
            request_data.append(this_results)
    else:
        error_dict["ERROR"] = ["No Collections"]
        collections_good = False

    all_res = {"meta": meta_info, "links": links_info}

    if collections_good:
        all_res["data"] = request_data
    else:
        all_res["errors"] = error_dict

    return jsonify(**all_res)
def api2_hostcollections(host_id=0):
    '''
    Return the (recent) collections for one host, one row per
    collection_type/collection_subtype pair, with host summary data in
    the meta section.
    '''

    args_def = {"hostid": {"req_type": int,
                           "default": host_id,
                           "required": True,
                           "positive": True,
                           "sql_param": True,
                           "sql_clause": " fk_host_id = %s "},
                "ctype": {"req_type": str,
                          "default": None,
                          "required": False,
                          "sql_param": True,
                          "sql_clause": "collection.collection_type REGEXP %s",
                          "sql_exact_clause": "collection.collection_type = %s",
                          "qdeparse": True}
                }

    args = manoward.process_args(args_def,
                                 request.args,
                                 coll_lulimit=g.twoDayTimestamp,
                                 include_coll_sql=True,
                                 include_exact=True)

    meta_dict = dict()
    request_data = list()
    links_dict = dict()

    meta_dict["version"] = 2
    meta_dict["name"] = "Jellyfish API Version 2 Host Results for Host ID {}".format(
        args["hostid"])
    meta_dict["status"] = "In Progress"

    links_dict["parent"] = "{}{}/".format(g.config_items["v2api"]["preroot"],
                                          g.config_items["v2api"]["root"])
    links_dict["self"] = "{}{}/hostinfo/{}?{}".format(
        g.config_items["v2api"]["preroot"],
        g.config_items["v2api"]["root"],
        args["hostid"],
        args["qdeparsed_string"])

    requesttype = "host_collections"

    host_collections_query = '''select collection_id, fk_host_id, UNIX_TIMESTAMP(initial_update) as initial_update, UNIX_TIMESTAMP(collection.last_update) as last_update, hostname, pop, srvtype, hoststatus, UNIX_TIMESTAMP(hosts.last_update) as hlast_update, collection_type, collection_subtype, collection_value from collection join hosts on collection.fk_host_id = hosts.host_id where {} group by collection_type, collection_subtype'''.format(" and ".join(args["args_clause"]))

    results = manoward.run_query(g.cur,
                                 host_collections_query,
                                 args=args["args_clause_args"],
                                 one=False,
                                 do_abort=True,
                                 require_results=False)

    rows = results.get("data", list())

    meta_dict["host_information"] = dict()

    if len(rows) > 0:
        # Inject some Meta Data from the first row (host columns are
        # identical on every row for a single host).
        hostzero = rows[0]

        g.logger.debug(hostzero)

        meta_dict["host_information"]["hostname"] = hostzero["hostname"]
        meta_dict["host_information"]["pop"] = hostzero["pop"]
        meta_dict["host_information"]["srvtype"] = hostzero["srvtype"]
        meta_dict["host_information"]["hoststatus"] = hostzero["hoststatus"]
        meta_dict["host_information"]["last_update"] = hostzero["hlast_update"]
    else:
        meta_dict["host_information"]["hostname"] = "No Results"
        meta_dict["host_information"]["pop"] = str()
        meta_dict["host_information"]["srvtype"] = str()
        meta_dict["host_information"]["hoststatus"] = str()
        meta_dict["host_information"]["last_update"] = 0

    for this_coll in rows:
        # Now pop this onto request_data
        request_data.append({"type": requesttype,
                             "id": this_coll["collection_id"],
                             "attributes": this_coll,
                             "relationships": dict()})

    return jsonify(meta=meta_dict, data=request_data, links=links_dict)
def api2_hostsearch():
    '''
    Execute a Search for Hosts

    The collection table is only joined in when a collection filter
    (ctype/csubtype/value) is actually supplied, since that join is
    expensive.
    '''

    args_def = {
        "ctype": {
            "req_type": str,
            "default": None,
            "required": False,
            "sql_param": True,
            "sql_clause": "collection.collection_type REGEXP %s",
            "sql_exact_clause": "collection.collection_type = %s",
            "qdeparse": True
        }
    }

    args = manoward.process_args(args_def,
                                 request.args,
                                 lulimit=g.twoDayTimestamp,
                                 include_hosts_sql=True,
                                 include_coll_sql=True,
                                 include_exact=True)

    meta_dict = dict()
    request_data = list()
    links_dict = dict()

    needs_collections = not (args.get("ctype", None) is None
                             and args.get("csubtype", None) is None
                             and args.get("value", None) is None)

    if not needs_collections:
        col_join = str()
    else:
        g.logger.debug(
            "I need Collections Joined. This may Slow down my Query.")

        col_join = "join collection on host_id = collection.fk_host_id"

        # Restrict the join to recent collection rows.
        args["args_clause"].append(
            "collection.last_update >= FROM_UNIXTIME(%s)")
        args["args_clause_args"].append(g.twoDayTimestamp)

    meta_dict["version"] = 2
    meta_dict["name"] = "Jellyfish API Version 2 Host Search results."
    meta_dict["status"] = "In Progress"

    links_dict["parent"] = "{}{}/".format(g.config_items["v2api"]["preroot"],
                                          g.config_items["v2api"]["root"])
    links_dict["self"] = "{}{}/hostsearch?{}".format(
        g.config_items["v2api"]["preroot"],
        g.config_items["v2api"]["root"],
        args["qdeparsed_string"])

    requesttype = "hostquery"

    host_search = '''SELECT host_id, host_uber_id, hostname, pop, srvtype, hoststatus, UNIX_TIMESTAMP(hosts.last_update) as last_update FROM hosts {0} WHERE {1} GROUP by host_id'''.format(
        col_join, " and ".join(args["args_clause"]))

    results = manoward.run_query(g.cur,
                                 host_search,
                                 args=args["args_clause_args"],
                                 one=False,
                                 do_abort=True,
                                 require_results=False)

    for this_host in results.get("data", list()):
        host_link = "{}{}/hostcollections/{}".format(
            g.config_items["v2api"]["preroot"],
            g.config_items["v2api"]["root"],
            this_host["host_id"])

        # Now pop this onto request_data
        request_data.append({"type": requesttype,
                             "id": this_host["host_id"],
                             "attributes": this_host,
                             "relationships": {"host_collections": host_link}})

    return jsonify(meta=meta_dict, data=request_data, links=links_dict)
def api2_ipsearch(ip=None):
    '''
    Given a IP or Subnet, Search for that Thing and Return it. Can Filter
    by items in the standards hosts column. Respects Exact

    Fix: the subnet-validation error paths logged via a bare `logger`
    name that is undefined in this function (NameError); they now use
    g.logger like the rest of this module.
    '''

    args_def = {
        "hostid": {
            "req_type": int,
            "default": None,
            "required": False,
            "positive": True,
            "sql_param": True,
            "sql_clause": " fk_host_id=%s "
        },
        "iptype": {
            "req_type": str,
            "default": None,
            "required": False,
            "enum": ("vips4", "vips6", "host4", "host6", "drac4", "drac6",
                     "netdev4", "netdev6", "unknown"),
            "qdeparse": True,
            "sql_param": True,
            "sql_clause": " guessed_type=%s "
        },
        "ip": {
            "req_type": str,
            "default": ip,
            "required": False,
            "qdeparse": True,
            "sql_param": True,
            "sql_clause": " ip_hex=INET6_ATON(%s) "
        },
        "subnet": {
            "req_type": str,
            "default": None,
            "required": False,
            "qdeparse": True,
            "sql_param": False
        }  # Custom Handling for this
    }

    args = manoward.process_args(args_def,
                                 request.args,
                                 lulimit=g.twoDayTimestamp,
                                 include_hosts_sql=True,
                                 include_exact=True)

    meta_info = dict()
    meta_info["version"] = 2
    meta_info["name"] = "IP Search Jellyfish2 API Version 2"
    meta_info["state"] = "In Progress"

    # Custom Handle Subnet : turn a CIDR into a pair of range clauses.
    if args.get("subnet", None) is not None:
        try:
            validated_subnet = ipaddress.ip_network(args["subnet"])
            min_ip = validated_subnet[0]
            max_ip = validated_subnet[-1]
        except ValueError as valerr:
            g.logger.error(
                "Unable to Validate Subnet given with error : {}".format(
                    valerr))
            abort(415)
        except Exception as general_error:
            g.logger.error("General Error when validating Subnet : {}".format(
                general_error))
            abort(500)
        else:
            args["args_clause"].append(" ip_hex > INET6_ATON( %s ) ")
            args["args_clause_args"].append(str(min_ip))
            args["args_clause"].append(" ip_hex < INET6_ATON( %s ) ")
            args["args_clause_args"].append(str(max_ip))

    if args.get("subnet", None) is None and args.get("ip", None) is None:
        g.logger.warning(
            "No IP Given : This might be a Long Query but I'll allow it.")

    if args.get("hostid", None) is None and args.get("hostname", None) is None:
        g.logger.warning(
            "No Host Factor Given : This might be a Long Query but I'll allow it."
        )

    requesttype = "ipsearch"

    links_info = dict()
    links_info["parent"] = "{}{}/ip".format(g.config_items["v2api"]["preroot"],
                                            g.config_items["v2api"]["root"])
    links_info["self"] = "{}{}/ip/search?{}".format(
        g.config_items["v2api"]["preroot"],
        g.config_items["v2api"]["root"],
        args["qdeparsed_string"])

    request_data = list()

    ip_search_query = '''select INET6_NTOA(ip_hex) as ip, ip_id, guessed_type, fk_host_id, hosts.hostname, hosts.pop, hosts.srvtype, hosts.hoststatus from ip_intel join hosts on ip_intel.fk_host_id = hosts.host_id where {}'''.format(" and ".join(args["args_clause"]))

    results = manoward.run_query(g.cur,
                                 ip_search_query,
                                 args=args["args_clause_args"],
                                 one=False,
                                 do_abort=True,
                                 require_results=False)

    for this_ip in results.get("data", list()):
        this_results = dict()
        this_results["type"] = requesttype
        this_results["id"] = this_ip["ip_id"]
        this_results["attributes"] = this_ip
        this_results["relationships"] = dict()

        # Now pop this onto request_data
        request_data.append(this_results)

    return jsonify(meta=meta_info, data=request_data, links=links_info)
def api2_collected_types_filtered(ctype="none"):
    '''
    A slower filtered subtype query.

    :param ctype: collection type to filter on (also settable via query arg).
    :returns: flask JSON response with meta/data/links (JSON-API style).
    '''

    args_def = {"ctype": {"req_type": str,
                          "default": ctype,
                          "required": True,
                          "sql_param": True,
                          "sql_clause": "collection.collection_type = %s",
                          "qdeparse": False},
                "usevalue": {"req_type": str,
                             "default": "false",
                             "required": False,
                             "sql_param": False,
                             "enum": ("true", "false"),
                             "qdeparse": True}
                }

    args = manoward.process_args(args_def,
                                 request.args,
                                 coll_lulimit=g.twoDayTimestamp,
                                 include_hosts_sql=True,
                                 include_coll_sql=True,
                                 include_exact=True)

    meta_dict = dict()
    request_data = list()
    links_dict = dict()

    meta_dict["version"] = 2
    meta_dict["name"] = \
        "Jellyfish API Version 2 : Filtered Collected Subtypes for type {}".format(
            args["ctype"])
    meta_dict["status"] = "In Progress"

    links_dict["parent"] = "{}{}/collected/types".format(
        g.config_items["v2api"]["preroot"],
        g.config_items["v2api"]["root"])
    # NOTE(review): "subtypes_filered" looks like a typo, but the route it
    # points at is registered elsewhere — confirm before renaming both.
    links_dict["self"] = "{}{}/collected/subtypes_filered/{}?{}".format(
        g.config_items["v2api"]["preroot"],
        g.config_items["v2api"]["root"],
        args["ctype"],
        args["qdeparsed_string"])

    requesttype = "collection_subtype_filtered"

    # usevalue=true additionally groups/returns the collection value,
    # otherwise group by subtype only.
    if args.get("usevalue", "false") == "true":
        group_value_get = ", collection_value as value "
        group_by_string = " group by collection_subtype, collection_value "
    else:
        group_value_get = " "
        group_by_string = " group by collection_subtype "

    collected_subtype_query = '''select distinct(collection_subtype) as subtype,
                                 count(*) as count {0}
                                 from collection
                                 join hosts ON fk_host_id = hosts.host_id
                                 where {1}
                                 {2}'''.format(group_value_get,
                                               " and ".join(args["args_clause"]),
                                               group_by_string)

    results = manoward.run_query(g.cur,
                                 collected_subtype_query,
                                 args=args["args_clause_args"],
                                 one=False,
                                 do_abort=True,
                                 require_results=False)

    # enumerate instead of range(len(...)) : avoids re-fetching the data
    # list and re-indexing it on every iteration.
    for index, this_subtype in enumerate(results.get("data", list())):
        this_results = dict()
        this_results["type"] = requesttype
        this_results["id"] = index
        this_results["attributes"] = this_subtype

        # Now pop this onto request_data
        request_data.append(this_results)

    return jsonify(meta=meta_dict, data=request_data, links=links_dict)
def api2_factorlist(factor="pop"): ''' List Pops, Optionally List pops that have hosts contained within ''' if request.url_rule.rule.startswith("pop"): factor = "pop" elif request.url_rule.rule.startswith("srv"): factor = "srvtype" args_def = { "factor": { "req_type": str, "default": factor, "required": True, "sql_param": False, "qdeparse": True, "enum": ("pop", "srvtype") } # Extend when the time comes } args = manoward.process_args(args_def, request.args, lulimit=g.twoDayTimestamp, include_hosts_sql=True, include_exact=True) request_data = list() meta_info = dict() meta_info["version"] = 2 meta_info[ "name"] = "Factor ({}) List for Jellyfish2 API Version 2 ".format( args["factor"]) meta_info["state"] = "In Progress" meta_info["children"] = dict() requesttype = "factor_list" links_info = dict() links_info["parent"] = "{}{}/".format(g.config_items["v2api"]["preroot"], g.config_items["v2api"]["root"]) links_info["self"] = "{}{}/factorlist?{}".format( g.config_items["v2api"]["preroot"], g.config_items["v2api"]["root"], args["qdeparsed_string"]) factor_list_query = '''select DISTINCT {} as factor from hosts where {}'''.format( args["factor"], " and ".join(args["args_clause"])) results = manoward.run_query(g.cur, factor_list_query, args=args["args_clause_args"], one=False, do_abort=True, require_results=False) for this_factor in results.get("data", list()): this_results = dict() this_results["type"] = requesttype this_results["id"] = this_factor["factor"] this_results["attributes"] = this_factor this_results["relationships"] = dict() this_results["relationships"][ "hostsof"] = "{}{}/hostsearch/?{}={}&{}".format( g.config_items["v2api"]["preroot"], g.config_items["v2api"]["root"], args["factor"], this_factor["factor"], args["qdeparsed_string"]) # Now pop this onto request_data request_data.append(this_results) return jsonify(meta=meta_info, data=request_data, links=links_info)
def api2_auditlist(audit_name=None,
                   audit_priority=None,
                   audit_description=None,
                   audit_long_description=None):
    '''
    List out All the Audits that meet the Items Prescribed.

    Each of the four optional parameters becomes an optional REGEXP
    filter against the corresponding audits column.
    '''

    # TODO add exact definitions

    def _regexp_filter(column, default):
        # All four filters share the same shape: an optional string that,
        # when present, becomes a parameterized "<column> REGEXP %s" clause.
        return {"req_type": str,
                "default": default,
                "required": False,
                "sql_param": True,
                "sql_clause": "{} REGEXP %s".format(column),
                "qdeparse": True}

    args_def = {
        "audit_name": _regexp_filter("audit_name", audit_name),
        "audit_description": _regexp_filter("audit_short_description",
                                            audit_description),
        "audit_priority": _regexp_filter("audit_priority", audit_priority),
        "audit_long_description": _regexp_filter("audit_long_description",
                                                 audit_long_description),
    }

    args = manoward.process_args(args_def, request.args)

    meta_info = {"version": 2,
                 "name": "Audit List for Jellyfish2 API Version 2",
                 "state": "In Progress",
                 "children": dict()}

    requesttype = "audit_list"

    preroot = g.config_items["v2api"]["preroot"]
    root = g.config_items["v2api"]["root"]

    links_info = {"self": "{}{}/auditlist?{}".format(
                      preroot, root, args.get("qdeparsed_string", "")),
                  "parent": "{}{}/".format(preroot, root),
                  "children": dict()}

    request_data = list()

    # Only attach a WHERE clause when at least one filter was supplied.
    if args["args_clause_args"]:
        where_full_string = " where " + " and ".join(args["args_clause"])
    else:
        where_full_string = " "

    audit_list_query = '''select audit_id, audit_name, audit_priority, audit_short_description, audit_primary_link from audits ''' \
        + where_full_string

    results = manoward.run_query(g.cur,
                                 audit_list_query,
                                 args=args["args_clause_args"],
                                 one=False,
                                 do_abort=True,
                                 require_results=True)

    for this_audit in results.get("data", list()):
        audit_id = this_audit["audit_id"]
        entry = {"type": requesttype,
                 "id": audit_id,
                 "attributes": this_audit,
                 "relationships": {
                     "auditinfo": "{}{}/auditinfo/{}".format(preroot,
                                                             root,
                                                             audit_id)}}
        request_data.append(entry)

    return jsonify(meta=meta_info, data=request_data, links=links_info)