Example #1
    def coordinate(self):
        idsetQueue = Queue.Queue()
        #outboundQueue = Queue.Queue()

        # The logic here is simple: we keep a pool of T worker threads.
        # Each worker polls the queue until it is signaled that nothing
        # else is coming, then exits.

        control = {"active": True, "out_active": True}
        for i in range(self.config['num_threads']):
            qlogger.info("starting thread %d", i)
            th = Thread(target=self.loadDetections, args=(i, control, idsetQueue))
            th.daemon = True
            th.start()
            self.detectionWorkers.append(th)

        # for reference
        #outputProcessor = Thread(target=self.handleOutput, args=(control, outboundQueue))
        #outputProcessor.setDaemon(True)
        #outputProcessor.start()

        # now drive the workers by feeding asset host IDs into the queue

        hip = HostIdsFifoPopulator(idsetQueue)
        hip.run()

        control['active'] = False
        # clean up the queue, clean up the threads
        idsetQueue.join()
        for th in self.detectionWorkers:
            th.join()
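For reference, a minimal standalone sketch of the same pool-plus-queue pattern the method uses (Python 2; the worker function and the queue contents below are illustrative, not part of the project):

    import Queue
    import time
    from threading import Thread

    def worker(control, q):
        # non-blocking get: drain the queue, exit once the flag drops and it is empty
        while True:
            try:
                item = q.get(False)
                # ... process item here ...
                q.task_done()
            except Queue.Empty:
                if not control['active']:
                    break
                time.sleep(1)

    control = {'active': True}
    q = Queue.Queue()
    workers = [Thread(target=worker, args=(control, q)) for _ in range(4)]
    for th in workers:
        th.daemon = True
        th.start()
    for n in range(10):        # producer side, stands in for HostIdsFifoPopulator
        q.put(n)
    control['active'] = False  # signal: nothing else is coming
    q.join()                   # blocks until every item has been task_done()
    for th in workers:
        th.join()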
Example #2
    def _post_parse(self):

        if self.create_lookup_csv:
            lookup_destination = APP_ROOT + '/lookups/qualys_kb.csv'
            qlogger.info("Update lookup file: %s with %s QIDs",
                         lookup_destination, len(self._qids))
            with open(lookup_destination, "w") as csvfile:
                writer = csv.DictWriter(csvfile,
                                        fieldnames=self.CSV_HEADER_COLUMNS)
                writer.writeheader()
                for qid in self._qids:
                    writer.writerow(self._qids[qid])
            qlogger.info("Updated lookup file: %s with %s QIDs",
                         lookup_destination, len(self._qids))
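As a quick illustration of the csv.DictWriter pattern used above (the column names and the sample row below are made up; CSV_HEADER_COLUMNS in the project is the real header list):

    import csv

    header = ['QID', 'TITLE', 'SEVERITY']  # illustrative columns
    qids = {'38170': {'QID': '38170', 'TITLE': 'Sample finding', 'SEVERITY': '2'}}
    with open('/tmp/qualys_kb.csv', 'w') as csvfile:
        writer = csv.DictWriter(csvfile, fieldnames=header)
        writer.writeheader()                # emits the header row once
        for qid in qids:
            writer.writerow(qids[qid])      # one dict per row, keyed by the header names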
Example #3
    def handleOutput(self, control, outBoundQueue):
        """
        It is possible to have the detection API output get piped into another queue,
        and this would be a serial way to process the detections.  However, since the
        loggin facility in python is used for writing out data to splunk, and it is
        thread-safe, there's practically no need for it.

        :param control:
        :param outBoundQueue:
        :return:
        """

        while True:
            try:
                qlogger.info("getting output item")
                item = outBoundQueue.get(False)
                qlogger.info("Output Thread: %s", item)
                outBoundQueue.task_done()
            except Queue.Empty:
                if not control['out_active']:
                    qlogger.info("output thread exiting")
                    break
                else:
                    qlogger.info("output thread waiting for work")
                    time.sleep(5)
                    continue
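The docstring's point is that the logging module already serializes writes: each handler holds an internal lock, so worker threads can share one logger without an output queue. A minimal setup along those lines (the handler path and format are illustrative):

    import logging

    qlogger = logging.getLogger('qualys')
    handler = logging.FileHandler('/tmp/qualys_detections.log')  # illustrative path
    handler.setFormatter(logging.Formatter('%(asctime)s %(threadName)s %(message)s'))
    qlogger.addHandler(handler)
    qlogger.setLevel(logging.INFO)
    # any number of threads can now call qlogger.info(...) safely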
Example #4
    def loadDetections(self, thread_id, control, idsetQueue, outBoundQueue=None):
        """
        :param thread_id: int, index of this worker thread
        :param control: dict with an 'active' flag that signals shutdown
        :param idsetQueue: Queue.Queue of inbound IDSet items
        :param outBoundQueue: Queue.Queue, optional output queue
        :return:
        """

        #TODO make this a thread object

        while True:
            # item: IDSet pulled off the inbound queue
            try:
                qlogger.info("getting idset inbound queue...")
                item = idsetQueue.get(False)
                # do something
                qlogger.info("processing idset: %s", item.tostring())

                thdp = ThreadedHostDetectionPopulator(item.tostring(), self.hostDetectionConfiguration, outBoundQueue)
                thdp.run()
                if thdp.get_host_logged_count > 0:
                    self.lock.acquire()
                    try:
                        self.host_logged += thdp.get_host_logged_count
                    except Exception as e:
                        qlogger.error(e)
                    finally:
                        self.lock.release()

                #outBoundQueue.put(item.tostring())
                idsetQueue.task_done()
            except Queue.Empty:
                qlogger.info("inbound queue empty")
                if not control['active']:
                    qlogger.info("inbound queue exiting")
                    break
                else:
                    qlogger.info("waiting for more work")
                    time.sleep(5)
                    continue
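The lock around the counter update is the standard way to aggregate results from several workers; a minimal version of just that piece (the class and attribute names are illustrative):

    from threading import Lock

    class Tally(object):
        def __init__(self):
            self.lock = Lock()
            self.host_logged = 0

        def add(self, count):
            with self.lock:  # acquires and releases even if an error occurs
                self.host_logged += count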
Example #5
File: common.py  Project: TPLink32/spnk1
    def run(self, api_user=None, api_password=None, configuration_dict=None):
        """


        :type configuration_dict: dict
        :param api_user:
        :param api_password:
        :param configuration_dict:
        """

        qlogger.info("Start")
        if not configuration_dict:
            configuration_dict = {}

        # Merge passed settings with Default APP settings
        configuration_dict = dict(default_settings.items() + configuration_dict.items())
        log_output_directory = configuration_dict.get('log_output_directory', None)
        if not (log_output_directory and os.path.isdir(log_output_directory)
                and os.access(log_output_directory, os.W_OK)):
            # unset, missing, or unwritable: drop the key and fall back to defaults
            configuration_dict.pop('log_output_directory', None)
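The merge line is just "defaults first, overrides second": building a dict from the concatenated item lists lets later pairs overwrite earlier keys. A Python 2 sketch (the default values here are illustrative):

    default_settings = {'num_threads': 2, 'log_output_directory': ''}  # illustrative defaults
    overrides = {'num_threads': 4}
    merged = dict(default_settings.items() + overrides.items())  # Python 2: items() returns lists
    assert merged == {'num_threads': 4, 'log_output_directory': ''}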
Example #6
File: run.py  Project: TPLink32/spnk1
    import getpass

    api_password = getpass.getpass("QG Password:"******"Using proxy")

    if log_kb_api:
        kb_log_populator = qualys.qualys_log_populator.QualysKBPopulator(settings=app_config)
        kb_log_populator.populate_lookup_table = True
        kb_log_populator.preserve_api_output = preserve_api_output
        kb_log_populator.run()

    if log_detection_api:
        detection_populator = qualys.qualys_log_populator.QualysDetectionPopulator(settings=app_config)
        detection_populator.preserve_api_output = preserve_api_output
        detection_populator.run()



except qualys.splunkpopulator.utils.QualysAPIClientException as e:
    qlogger.error(e)  # assumed handler; the body is not shown in this snippet
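For context, getpass.getpass prompts on the terminal without echoing what is typed, which is why the snippet uses it for the API password (the username prompt below is illustrative):

    import getpass

    api_user = raw_input("QG Username: ")           # Python 2 input
    api_password = getpass.getpass("QG Password:")  # typed characters are not echoed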