def pollScanResults(self, event, context):
        # This function takes a Tenable.io scan ID, queries the
        # Tenable.io API for the status of that scan and, if the scan
        # has completed, returns the results as a JSON object

        source_event = Event(event, context)
        data = source_event.parse()

        if data:
            target = Target(data.get('target'))
            if not target:
                self.logger.error("Target validation failed of: {}".format(
                    target.name))
                return False

            scanID = event['responses']['Tenablescan']['id']
            scanner = TIOScanner(logger=self.logger)
            json_result = scanner.scanResult(scanID, result_format="json")
            html_result = scanner.scanResult(scanID, result_format="html")
            if json_result and html_result:
                send_to_s3(target.name + "_tenablescan",
                           json_result,
                           client=self.s3_client,
                           bucket=self.s3_bucket)
                send_to_s3(target.name + "_tenablescan",
                           html_result,
                           client=self.s3_client,
                           bucket=self.s3_bucket)
                return {'statusCode': 200}
        else:
            self.logger.error("Unrecognized payload: {}".format(data))
            return False
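
# pollScanResults() above reads the Tenable scan ID from
# event['responses']['Tenablescan']['id'] and a 'target' value from the parsed
# Event. The full payload shape is not shown here; the dict below is a
# hypothetical minimal event illustrating only the fields the handler touches.
example_event = {
    "responses": {
        "Tenablescan": {"id": 42}    # scan ID returned by an earlier Tenable.io step (illustrative)
    },
    "target": "www.example.com"      # assumed to be what Event.parse() exposes as data['target']
}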
Example #2

def runScanFromQ(event, context):

    # This is needed so that the nmap static library and
    # dirb are added to the PATH
    _environ = dict(os.environ)
    nmap_path = os.environ['LAMBDA_TASK_ROOT'] + '/vendor/nmap-standalone/'
    dirb_path = os.environ['LAMBDA_TASK_ROOT'] + '/vendor/dirb/'
    try:
        os.environ.update({'PATH': os.environ['PATH'] + ':' + nmap_path + ':' + dirb_path})
        # Read the queue
        for record, keys in event.items():
            for item in keys:
                if "body" in item:
                    message = item['body']
                    scan_type, target, uuid = message.split('|')
                    if scan_type == "httpobservatory":
                        scanner = HTTPObservatoryScanner(logger=logger)
                        scan_result = scanner.scan(target)
                        send_to_s3(target + "_httpobservatory", scan_result, client=S3_CLIENT, bucket=S3_BUCKET)
                    elif scan_type == "sshobservatory":
                        scanner = SSHObservatoryScanner(logger=logger)
                        scan_result = scanner.scan(target)
                        send_to_s3(target + "_sshobservatory", scan_result, client=S3_CLIENT, bucket=S3_BUCKET)
                    elif scan_type == "tlsobservatory":
                        scanner = TLSObservatoryScanner(logger=logger)
                        scan_result = scanner.scan(target)
                        send_to_s3(target + "_tlsobservatory", scan_result, client=S3_CLIENT, bucket=S3_BUCKET)
                    elif scan_type == "portscan":
                        scanner = PortScanner(target, logger=logger)
                        nmap_scanner = scanner.scanTCP()
                        while nmap_scanner.still_scanning():
                            # Poll every second until the asynchronous scan completes
                            nmap_scanner.wait(1)
                    elif scan_type == "tenableio":
                        scanner = TIOScanner(logger=logger)
                        nessus_scanner = scanner.scan(target)
                        nessus_scanner.launch(wait=False)
                    elif scan_type == "websearch":
                        searcher = WebSearcher(logger=logger)
                        search_results = searcher.search(target)
                        send_to_s3(target + "_websearch", search_results, client=S3_CLIENT, bucket=S3_BUCKET)
                    elif scan_type == "direnumscan":
                        scanner = DirectoryEnumScanner(logger=logger)
                        return_code, direnum_result = scanner.scan(target)
                        send_to_s3(target + "_direnum", direnum_result, client=S3_CLIENT, bucket=S3_BUCKET)
                    else:
                        # Manually invoked, just log the message
                        logger.info("Message in queue: {}".format(message))
                else:
                    logger.error("Unrecognized message in queue: {}".format(message))

    except Exception as e:
        logger.error("Exception occurred while running scans from the queue: {}".format(e))
    finally:
        # Restore environment variables to their original values
        os.environ.update(_environ)
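
# runScanFromQ() above iterates a Lambda SQS event ({'Records': [{'body': ...}, ...]})
# and expects each message body to be a pipe-delimited "scan_type|target|uuid"
# string (see message.split('|')). A minimal sketch of the producer side is
# shown below; the queue URL and helper name are assumptions, not part of the
# original code.
import uuid

import boto3

def queue_scan(scan_type, target, queue_url):
    # Build the pipe-delimited body the consumer expects
    body = "{}|{}|{}".format(scan_type, target, uuid.uuid4())
    boto3.client('sqs').send_message(QueueUrl=queue_url, MessageBody=body)

# Example (hypothetical queue URL):
# queue_scan("portscan", "www.example.com",
#            "https://sqs.us-west-2.amazonaws.com/123456789012/scan-queue")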
Example #3

 def generateURL(self):
     # Generate a download URL for whichever results are in
     # the S3 bucket for that host, while keeping track of
     # missing results
     self.scan_output_dict, self.scan_output_list, status = self.__poll()
     if status == 404:
         return status, False, False
     else:
         # Does not matter if status is 200 or 202, we
         # will generate a signed URL, but also return
         # the result dictionary
         host_results_dir = os.path.join(self.base_results_path,
                                         self.hostname)
         ready = self.__prepareResults(host_results_dir)
         if ready:
             # Downloaded the output for the target on the "serverless" server
             # Now, we need to zip it up and upload back to S3.
             tgz_results = package_results(host_results_dir)
             s3_object = send_to_s3(self.hostname,
                                    tgz_results,
                                    client=self.s3_client,
                                    bucket=self.bucket)
             # We need to generate a signed URL now
             download_url = create_presigned_url(s3_object,
                                                 client=self.s3_client,
                                                 bucket=self.bucket)
             return status, self.scan_output_dict, download_url
         else:
             # Something went wrong, return False
             return status, False, False
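
# generateURL() relies on a create_presigned_url() helper that is not shown in
# this snippet. A minimal sketch of what such a helper could look like with
# boto3 is below; the parameter names and the one-hour expiry are assumptions,
# not the original implementation.
def create_presigned_url(key, client, bucket, expires_in=3600):
    # 'key' is assumed to be the S3 object key returned by send_to_s3()
    return client.generate_presigned_url(
        'get_object',
        Params={'Bucket': bucket, 'Key': key},
        ExpiresIn=expires_in
    )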
Example #4

    def generateDownloadURL(self):
        # Only generate a signed download URL once all of the
        # tool output is available

        # Setting default status, HTTP 202 means "Accepted"
        status = 202
        # TODO: We need a timeout function here; one possible approach is
        # sketched after this example
        while status == 202:
            self.scan_output_list, status = self.__poll()
            time.sleep(1)
        # status here is either 200 or 404

        if status != 404:
            host_results_dir = os.path.join(self.base_results_path,
                                            self.hostname)
            ready = self.__prepareResults(host_results_dir)
            if ready:
                # Downloaded the output for the target on the "serverless" server
                # Now, we need to zip it up and upload back to S3.
                tgz_results = package_results(host_results_dir)
                s3_object = send_to_s3(self.hostname,
                                       tgz_results,
                                       client=self.s3_client,
                                       bucket=self.bucket)
                # We need to generate a signed URL now
                download_url = create_presigned_url(s3_object,
                                                    client=self.s3_client,
                                                    bucket=self.bucket)
                return download_url, status
            else:
                # Something went wrong, return False
                return False, status
        else:
            # No results for the host found
            return False, status
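
# The TODO above notes that the polling loop in generateDownloadURL() has no
# timeout and will spin for as long as __poll() keeps returning HTTP 202. One
# possible (hypothetical) way to bound it, not part of the original code:
import time

def poll_with_timeout(poll_fn, timeout=300, interval=1):
    # poll_fn is assumed to behave like __poll(), returning (output_list, status)
    deadline = time.time() + timeout
    output, status = poll_fn()
    while status == 202 and time.time() < deadline:
        time.sleep(interval)
        output, status = poll_fn()
    return output, status
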
 def callback_results(self, hostname, scan_result):
     send_to_s3(self.host + "_tcpscan",
                scan_result,
                client=S3_CLIENT,
                bucket=S3_BUCKET)
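
# callback_results() has the (host, scan_result) signature that python-nmap's
# PortScannerAsync passes to its callback, and the still_scanning()/wait()
# calls in the "portscan" branch above match that library. A minimal
# standalone sketch of wiring up such a callback (the hostname, ports and nmap
# arguments are illustrative):
import nmap

def on_host_scanned(host, scan_result):
    # Called once per host as results arrive; persist or forward them here
    print(host, scan_result)

nma = nmap.PortScannerAsync()
nma.scan(hosts='www.example.com', ports='22,80,443',
         arguments='-sT', callback=on_host_scanned)
while nma.still_scanning():
    nma.wait(1)   # same wait-loop pattern as the "portscan" branch above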