def bulk_query(self):
    """
    Lazily build and return the bulk Elasticsearch query used to populate
    Elasticsearch data. The query object is created on first access and
    cached on the instance for all subsequent calls.

    :return: the bulk Elasticsearch query to use to populate Elasticsearch data.
    """
    cached = self._bulk_query
    if cached is not None:
        return cached
    self._bulk_query = BulkElasticsearchQuery()
    return self._bulk_query
def geolocate_ip_address(
        self,
        org_uuid=None,
        ip_address_uuid=None,
        ip_address_scan_uuid=None,
        order_uuid=None,
):
    """
    Perform geolocation of the given IP address and index the results in
    Elasticsearch via a bulk query.

    :param org_uuid: The UUID of the organization to perform geolocation on behalf of.
        Also used as the Elasticsearch index for the results.
    :param ip_address_uuid: The UUID of the IP address to geolocate.
    :param ip_address_scan_uuid: The UUID of the IP address scan to associate geolocation
        results with.
    :param order_uuid: The UUID of the related order. Not referenced in this method body;
        accepted for consistency with the other scan task signatures.
    :return: None
    """
    logger.info(
        "Now geolocating IP address %s."
        % (ip_address_uuid,)
    )
    index_query = BulkElasticsearchQuery()
    for found_location in self.inspector.get_geolocations(use_class_c=True):
        # Convert each geolocation to its Elasticsearch model and queue it
        # for indexing under the organization's index.
        index_query.add_model_for_indexing(
            model=found_location.to_es_model(model=self.ip_address_scan),
            index=org_uuid,
        )
    index_query.save()
    logger.info(
        "All geolocation data collected for IP address %s."
        % (ip_address_uuid,)
    )
def enumerate_cipher_suites_for_ssl_service(
        self,
        org_uuid=None,
        network_service_uuid=None,
        network_service_scan_uuid=None,
        order_uuid=None,
):
    """
    Enumerate all of the cipher suites that the given SSL/TLS service supports and
    index the results in Elasticsearch via a bulk query.

    :param org_uuid: The UUID of the organization to enumerate cipher suites on behalf of.
        Also used as the Elasticsearch index for the results.
    :param network_service_uuid: The UUID of the network service that is being scanned.
    :param network_service_scan_uuid: The UUID of the network service scan that this
        enumeration is a part of.
    :param order_uuid: The UUID of the related order. Not referenced in this method body;
        accepted for consistency with the other scan task signatures.
    :return: None
    """
    logger.info(
        "Now enumerating supported cipher suites for network service %s."
        % (network_service_uuid, ))
    ip_address = self.network_service.ip_address.address
    port = self.network_service.port
    server_info = ServerConnectivityInfo(hostname=ip_address, ip_address=ip_address, port=port)
    try:
        server_info.test_connectivity_to_server()
    except ServerConnectivityError as e:
        # Format the exception itself rather than e.message -- BaseException.message
        # was deprecated in Python 2.6 and removed in Python 3, so e.message would
        # raise AttributeError instead of logging the failure.
        logger.warning(
            "ServerConnectivityError thrown when attempting to inspect SSL at %s:%s: %s"
            % (ip_address, port, e))
        return
    scanner = SynchronousScanner()
    bulk_query = BulkElasticsearchQuery()
    network_service_scan = self.network_service_scan
    for ssl_protocol, command in get_ssl_cipher_suite_commands():
        result = scanner.run_scan_command(server_info, command())
        # A protocol counts as supported when at least one cipher was accepted.
        ssl_support_record = SslSupportModel.from_database_model(
            network_service_scan,
            ssl_version=ssl_protocol,
            supported=len(result.accepted_cipher_list) > 0,
        )
        ssl_support_record.accepted_ciphers = [
            cipher.name for cipher in result.accepted_cipher_list
        ]
        ssl_support_record.rejected_ciphers = [
            cipher.name for cipher in result.rejected_cipher_list
        ]
        ssl_support_record.errored_ciphers = [
            cipher.name for cipher in result.errored_cipher_list
        ]
        # preferred_cipher can be None when the protocol is unsupported.
        ssl_support_record.preferred_cipher = (
            result.preferred_cipher.name if result.preferred_cipher else None
        )
        bulk_query.add_model_for_indexing(model=ssl_support_record, index=org_uuid)
    logger.info(
        "All cipher suite information converted to Elasticsearch data. Now updating via bulk query."
    )
    bulk_query.save()
    logger.info(
        "Bulk query completed. SSL cipher suites enumerated for network service %s."
        % (network_service_uuid, ))
def crawl_web_service(
        self,
        web_service_uuid=None,
        org_uuid=None,
        web_service_scan_uuid=None,
        order_uuid=None,
):
    """
    Crawl the given web service and index the results in Elasticsearch. The
    temporary local results file is always deleted, even when indexing fails.

    :param web_service_uuid: The UUID of the web service to crawl.
    :param org_uuid: The UUID of the organization to crawl the web service on behalf of.
        Also used as the Elasticsearch index for the results.
    :param web_service_scan_uuid: The UUID of the scan that this crawling session is part of.
    :param order_uuid: The UUID of the related order. Not referenced in this method body;
        added (with a default, so existing callers are unaffected) for consistency with
        the other scan task signatures.
    :return: None
    """
    ip_address, port, hostname, use_ssl = get_endpoint_information_for_web_service(
        web_service_uuid=web_service_uuid,
        db_session=self.db_session,
    )
    logger.info(
        "Now crawling endpoint at %s:%s for scan %s. Organization is %s."
        % (ip_address, port, web_service_scan_uuid, org_uuid)
    )
    runner = CrawlRunner()
    results_file_path, results_wrapper = runner.crawl_endpoint_to_file(
        ip_address=ip_address,
        port=port,
        use_ssl=use_ssl,
        hostname=hostname,
        in_separate_process=True,
    )
    logger.info(
        "Crawling completed for endpoint at %s:%s. Indexing results to Elasticsearch."
        % (ip_address, port)
    )
    try:
        bulk_query = BulkElasticsearchQuery()
        web_service_scan = WebServiceScan.by_uuid(db_session=self.db_session, uuid=web_service_scan_uuid)
        site_url_wrapper = UrlWrapper.from_endpoint(hostname=hostname, port=port, path="/", use_ssl=use_ssl)
        for es_model in results_wrapper.iter_es_models(web_service_scan=web_service_scan, site_url=site_url_wrapper):
            bulk_query.add_model_for_indexing(model=es_model, index=org_uuid)
        logger.info(
            "Now updating Elasticsearch via bulk query. Total operations: %s."
            % (bulk_query.batch_length,)
        )
        bulk_query.save()
    finally:
        # Always clean up the on-disk crawl results so a failed indexing run
        # does not leak temporary files.
        FilesystemHelper.delete_file(results_file_path)
    logger.info(
        "Elasticsearch updated with crawling results for endpoint %s:%s and local file deleted."
        % (ip_address, port)
    )
def _populate_elasticsearch(self):
    """
    Index both the generic and the HTML web resource models for this
    organization into Elasticsearch through a single bulk query.

    :return: None
    """
    target_index = self.organization.uuid
    indexing_query = BulkElasticsearchQuery()
    model_batches = (
        self.__get_generic_web_resource_models(),
        self.__get_html_web_resource_models(),
    )
    for batch in model_batches:
        indexing_query.add_models_for_indexing(models=batch, index=target_index)
    indexing_query.save()