def get_service_detail():
    """
    GET /system/status

    Build a best-effort snapshot of overall system state: per-service
    states, simplequeue queue lengths, and recent error events.  Each
    sub-section is gathered independently so that a failure in one does
    not prevent the others from being reported.

    :return: tuple of (return_object, httpcode) for the API layer;
             return_object is the service-detail dict on success or an
             error-response dict on failure
    """
    global apiext_status

    request_inputs = anchore_engine.services.common.do_request_prep(
        request, default_params={})
    user_auth = request_inputs['auth']
    params = request_inputs['params']

    httpcode = 500
    service_detail = {}

    try:
        # Per-service state: one entry per registered service record.
        # Best-effort: on failure, keep whatever was gathered so far.
        service_detail['service_states'] = []
        try:
            up_services = {}
            service_records = catalog.get_service(user_auth)
            for service in service_records:
                el = make_response_service(user_auth, service, params)
                service_detail['service_states'].append(el)
                # tally of "up" instances per service name
                if el['servicename'] not in up_services:
                    up_services[el['servicename']] = 0
                if el['status']:
                    up_services[el['servicename']] += 1
        except Exception:
            pass

        # Queue lengths from simplequeue (best-effort).
        service_detail['queues'] = {}
        ret_queues = {}
        try:
            queues = simplequeue.get_queues(user_auth)
            for queuename in queues:
                ret_queues[queuename] = {}
                qlen = simplequeue.qlen(user_auth, queuename)
                ret_queues[queuename]['qlen'] = qlen
            service_detail['queues'] = ret_queues
        except Exception:
            pass

        # Recent error events from the catalog (best-effort).
        service_detail['error_event'] = []
        try:
            events = catalog.get_event(user_auth)
            for event in events:
                el = {}
                for k in ['message_ts', 'hostId', 'message', 'level']:
                    el[k] = event[k]
                service_detail['error_event'].append(el)
        except Exception:
            pass

        httpcode = 200
        return_object = service_detail
    except Exception as err:
        # BUGFIX: the error response used to be unconditionally
        # overwritten by the partial service_detail dict before return;
        # now the error payload actually reaches the caller.
        return_object = anchore_engine.services.common.make_response_error(
            err, in_httpcode=httpcode)
        httpcode = return_object['httpcode']

    apiext_status.update(return_object)
    return (return_object, httpcode)
def monitor_func(**kwargs):
    """
    Periodic analyzer monitor cycle.

    Skips the first 5 invocations after startup (the ``click`` warm-up
    counter), and rate-limits itself to one firing per ``kick_timer``
    seconds.  When it fires, it dequeues up to ``max_threads`` analysis
    tasks from the analyzer queue, processes each in its own thread via
    ``process_analyzer_job``, and joins all threads before returning.

    :param kwargs: expects 'kick_timer' — minimum seconds between runs
    :return: True always; errors are logged, never raised
    """
    global click, running, last_run, queuename, system_user_auth

    timer = int(time.time())

    # Warm-up: skip the first 5 cycles after startup.
    if click < 5:
        click = click + 1
        logger.debug("Analyzer starting in: " + str(5 - click))
        return (True)

    # Rate limit: only fire once per kick_timer seconds.
    if round(time.time() - last_run) < kwargs['kick_timer']:
        logger.spew("timer hasn't kicked yet: " + str(
            round(time.time() - last_run)) + " : " + str(kwargs['kick_timer']))
        return (True)

    try:
        running = True
        last_run = time.time()
        logger.debug("FIRING: analyzer")

        localconfig = anchore_engine.configuration.localconfig.get_config()
        system_user_auth = localconfig['system_user_auth']

        queues = simplequeue.get_queues(system_user_auth)
        if not queues:
            logger.warn(
                "could not get any queues from simplequeue client, cannot do any work"
            )
        elif queuename not in queues:
            logger.error(
                "connected to simplequeue, but could not find queue (" +
                queuename + "), cannot do any work")
        else:
            try:
                # Thread-pool size from the analyzer service config;
                # fall back to 1 when the key is absent or malformed.
                # (Was a bare `except:`, which also swallowed
                # KeyboardInterrupt/SystemExit.)
                try:
                    myconfig = localconfig['services']['analyzer']
                    max_analyze_threads = int(myconfig['max_threads'])
                except (KeyError, TypeError, ValueError):
                    max_analyze_threads = 1

                logger.debug("max threads: " + str(max_analyze_threads))
                threads = []
                for i in range(0, max_analyze_threads):
                    if simplequeue.qlen(system_user_auth, queuename) > 0:
                        qobj = simplequeue.dequeue(system_user_auth, queuename)
                        if qobj:
                            # deep-copy so the worker thread owns its task
                            myqobj = copy.deepcopy(qobj)
                            logger.spew("incoming queue object: " + str(myqobj))
                            logger.debug("incoming queue task: " +
                                         str(myqobj.keys()))
                            logger.debug("starting thread")
                            athread = threading.Thread(
                                target=process_analyzer_job,
                                args=(
                                    system_user_auth,
                                    myqobj,
                                ))
                            athread.start()
                            threads.append(athread)
                            logger.debug("thread started")
                            # rc = process_analyzer_job(system_user_auth, myqobj)
                    else:
                        logger.debug(
                            "analyzer queue is empty - no work this cycle")

                # Wait for all workers launched this cycle to finish.
                for athread in threads:
                    logger.debug("joining thread")
                    athread.join()
                    logger.debug("thread joined")
            except Exception as err:
                logger.error(str(err))
    except Exception as err:
        logger.error(str(err))
    finally:
        # Always clear the running flag and log cycle duration.
        running = False
        logger.debug("FIRING DONE: analyzer: " +
                     str(int(time.time()) - timer))

    return (True)