def check_password_setting(self):
    """Audit the Redis ``requirepass`` setting.

    Flags a missing password, then checks the configured password's
    strength via ``utils.check_pwd``. Returns 0 when no password is set
    (early exit), otherwise None.
    """
    # Hoist the extraction: the original called password_extraction() twice.
    passwords = self.password_extraction()
    if not passwords:
        logs.ISSUE("No password has been set. ")
        logs.RECOMMENDATION("requirepass [your_password]")
        return 0
    password = passwords[0]
    if utils.check_pwd(password):
        logs.DEBUG('Password is strong')
    else:
        logs.ISSUE('Password could be easily guessed.')
        # Fixed typo in the user-facing hint: "passwor" -> "password".
        logs.RECOMMENDATION("requirepass [stronger password]")
def check_command(self):
    """Audit Redis ``rename-command`` hardening for dangerous commands.

    For each sensitive command, reports an issue when it is not renamed;
    when renamed, accepts either a strong random name or '""' (disabled).
    """
    rename_settings = self.config_extraction()
    for cmd in ["config", "debug", "shutdown", "flushdb", "flushall", "eval"]:
        if cmd not in rename_settings:
            logs.ISSUE(f"{cmd} command is exposed to every user.")
            logs.RECOMMENDATION(f"rename-command {cmd} [UUID]")
        elif utils.check_normal_pwd(rename_settings[cmd]) or rename_settings[cmd] == '""':
            # Fixed wrong log text: the original always said "Config command"
            # even when auditing debug/shutdown/flushdb/etc.
            logs.DEBUG(f'{cmd} command is protected by random string or disabled')
        else:
            # Fixed grammar: "command' new name" -> "command's new name".
            logs.ISSUE(f"{cmd} command's new name could be easily guessed. ")
            logs.RECOMMENDATION(f"rename-command {cmd} [UUID]")
def check_encryption(self):
    """Check that Spark network and disk I/O encryption are enabled.

    Reads ``spark.network.crypto.enabled`` and ``spark.io.encryption.enabled``
    from the parsed config, reporting an issue for each that is off.
    """
    if utils.get_item_from_obj(self.content, "spark.network.crypto.enabled", default="false") == "false":
        logs.ISSUE("Network encryption is not enabled")
        # Fixed recommendation key: was "…crypto.enable", but the actual
        # Spark property (and the key checked above) is "…crypto.enabled".
        logs.RECOMMENDATION("spark.network.crypto.enabled = true")
    else:
        logs.DEBUG('Network encryption is enabled')
    if utils.get_item_from_obj(self.content, "spark.io.encryption.enabled", default="false") == "false":
        logs.ISSUE("Disk encryption is not enabled")
        # Fixed recommendation key: "…encryption.enable" -> "…encryption.enabled".
        logs.RECOMMENDATION("spark.io.encryption.enabled = true")
    else:
        logs.DEBUG('Disk encryption is enabled')
def check_acl(self):
    """Verify Spark ACLs are enabled for both the web portal and the history server."""
    # Each entry: (config key, human-readable target of the check).
    acl_settings = (
        ("spark.acls.enable", "web portal"),
        ("spark.history.ui.acls.enable", "history server"),
    )
    for key, target in acl_settings:
        enabled = utils.get_item_from_obj(self.content, key, default="false")
        if enabled == "false":
            logs.ISSUE(f"Access control not enabled for {target}")
            logs.RECOMMENDATION(f"{key} = true")
        else:
            logs.DEBUG(f"Access control is enabled for {target}")
def check_xss(self):
    """Verify the Spark UI's XSS and content-type-sniffing (CORB) protections."""
    xss_header = utils.get_item_from_obj(
        self.content, "spark.ui.xXssProtection", default="1;mode=block")
    if xss_header != "0":
        logs.DEBUG('XSS protection is enabled')
    else:
        logs.ISSUE("XSS protection is not enabled")
        logs.RECOMMENDATION("spark.ui.xXssProtection = 1")
    corb_flag = utils.get_item_from_obj(
        self.content, "spark.ui.xContentTypeOptions.enabled", default="true")
    if corb_flag != "false":
        logs.DEBUG('CORB protection is enabled')
    else:
        logs.ISSUE("CORB protection is not enabled")
        logs.RECOMMENDATION("spark.ui.xContentTypeOptions.enabled = true")
def check_ssl(self):
    """Check whether Spark SSL is turned on (``spark.ssl.enabled``)."""
    if utils.get_item_from_obj(self.content, "spark.ssl.enabled", default="false") == "false":
        logs.ISSUE("SSL is not enabled")
        # Fixed recommendation key: was "spark.ssl.enable", but the actual
        # property (and the key checked above) is "spark.ssl.enabled".
        logs.RECOMMENDATION("spark.ssl.enabled = true")
    else:
        logs.DEBUG('SSL is enabled')
def check_authentication(self):
    """Check Spark authentication and the strength of the shared secret.

    Reports an issue when ``spark.authenticate`` is off, then always
    evaluates ``spark.authenticate.secret`` with ``utils.check_pwd``
    (an absent secret is checked as the empty string).
    """
    if utils.get_item_from_obj(self.content, "spark.authenticate", default="false") == "false":
        logs.ISSUE("Everyone can visit the instance")
        logs.RECOMMENDATION("spark.authenticate = true")
    else:
        logs.DEBUG("Authentication is enabled")
    password = utils.get_item_from_obj(self.content, "spark.authenticate.secret", default="")
    if utils.check_pwd(password):
        logs.DEBUG('Password is strong')
    else:
        logs.ISSUE('Password could be easily guessed.')
        # Fixed typo in the user-facing hint: "passwor" -> "password".
        logs.RECOMMENDATION("spark.authenticate.secret [stronger password]")
def check_logging(self):
    """Verify that Spark event logging (``spark.eventLog.enabled``) is on."""
    event_log = utils.get_item_from_obj(
        self.content, "spark.eventLog.enabled", default="false")
    if event_log != "false":
        logs.DEBUG('Logging is enabled')
    else:
        logs.ISSUE("Logging is not enabled")
        logs.RECOMMENDATION("spark.eventLog.enabled = true")
def check_global_ac(self):
    """Audit Hadoop's global authentication method and authorization switch."""
    logs.INFO("Checking global access control")
    auth_method = utils.get_item_from_obj(
        self.conf_obj, "hadoop.security.authentication", default="simple")
    if auth_method != "simple":
        logs.DEBUG(f"Authentication method [{auth_method}] enabled")
    else:
        # "simple" means no real authentication at all.
        logs.ISSUE("Everyone can access the instance")
        logs.RECOMMENDATION("hadoop.security.authentication = kerberos")
    authz = utils.get_item_from_obj(
        self.conf_obj, "hadoop.security.authorization", default="false")
    if authz != "false":
        logs.DEBUG("Authorization enabled")
    else:
        logs.ISSUE("Authorization is not enabled")
        logs.RECOMMENDATION("hadoop.security.authorization = true")
def check_web_portal_ac(self):
    """Audit Hadoop web-portal authentication and anonymous access."""
    logs.INFO("Checking web portal access control")
    portal_auth = utils.get_item_from_obj(
        self.conf_obj, "hadoop.http.authentication.type", default="simple")
    if portal_auth != "simple":
        logs.DEBUG(f"Authentication method [{portal_auth}] enabled")
        return
    logs.ISSUE("Everyone can access the web portal")
    logs.RECOMMENDATION("hadoop.http.authentication.type = kerberos")
    # Under "simple" auth, anonymous access is a separate, additional hole.
    anonymous = utils.get_item_from_obj(
        self.conf_obj,
        "hadoop.http.authentication.simple.anonymous.allowed",
        default="true")
    if anonymous == "true":
        logs.ISSUE("Anonymous is allowed to access web portal.")
        logs.RECOMMENDATION(
            "hadoop.http.authentication.simple.anonymous.allowed = false"
        )
def check_fs_permission(self):
    """Audit HDFS permission enforcement and ACL support.

    Checks ``dfs.permissions.enabled`` (basic permission system) and
    ``dfs.namenode.acls.enabled`` (fine-grained ACLs).
    """
    logs.INFO("Checking hdfs permission")
    if utils.get_item_from_obj(self.conf_obj, "dfs.permissions.enabled", default="true") == "false":
        # Fixed typo in the issue message: "CURD" -> "CRUD".
        logs.ISSUE(
            "HDFS does not have access control. Everyone could conduct CRUD operations on the instance."
        )
        logs.RECOMMENDATION("dfs.permissions.enabled = true")
    else:
        logs.DEBUG("HDFS permission system is enabled.")
    if utils.get_item_from_obj(self.conf_obj, "dfs.namenode.acls.enabled", default="false") == "false":
        logs.ISSUE("HDFS ACLs is not enabled.")
        logs.RECOMMENDATION("dfs.namenode.acls.enabled = true")
    else:
        logs.DEBUG("HDFS ACLs is enabled.")
def check_ssl(self):
    """Verify that Hadoop SSL (``hadoop.ssl.enabled``) is switched on."""
    logs.INFO("Checking SSL")
    ssl_flag = utils.get_item_from_obj(
        self.conf_obj, "hadoop.ssl.enabled", default="false")
    if ssl_flag != "false":
        logs.DEBUG("SSL is enabled.")
    else:
        logs.ISSUE("SSL is disabled.")
        logs.RECOMMENDATION("hadoop.ssl.enabled = true")
def check_exposure(self):
    """Flag any Redis bind address that is reachable from the internet."""
    try:
        # First extracted bind line; its whitespace-separated tokens are IPs.
        for addr in self.ip_extraction()[0].split():
            if utils.is_internal(addr):
                logs.DEBUG(f"Redis is only exposed to internal network ({addr})")
            else:
                logs.ISSUE(f"Redis is set to be exposed to the internet ({addr}).")
                logs.RECOMMENDATION("bind [internal_ip]")
    except IndexError:
        # ip_extraction() returned nothing to index into.
        logs.ERROR("No IP is extracted from config file. Is the config file correct?")
def check_nfs_export_range(self):
    """Check whether the NFS export host list is the wide-open default."""
    logs.INFO("Checking export range")
    allowed_hosts = utils.get_item_from_obj(
        self.conf_obj, "nfs.exports.allowed.hosts", default="* rw")
    if allowed_hosts != "* rw":
        # A non-default value needs human judgement, so only log it.
        logs.DEBUG(
            f"NFS host priv: {allowed_hosts}. Evaluate based on the context."
        )
    else:
        logs.ISSUE("NFS is exposed to internet for read and write.")
        logs.RECOMMENDATION(" / qualify nfs.exports.allowed.hosts")
def check_registry_ac(self):
    """Check that the Hadoop RM registry, when enabled, is secured.

    Only inspects ``hadoop.registry.secure`` if ``hadoop.registry.rm.enabled``
    is "true"; a disabled registry is merely logged.
    """
    logs.INFO("Checking registry access control")
    if utils.get_item_from_obj(self.conf_obj, "hadoop.registry.rm.enabled", default="false") == "true":
        if utils.get_item_from_obj(self.conf_obj, "hadoop.registry.secure", default="false") == "false":
            logs.ISSUE("registry.secure is not enabled. ")
            logs.RECOMMENDATION("hadoop.registry.secure = true")
        else:
            # Dropped the needless f-prefix: the string has no placeholders.
            logs.DEBUG("Registry security is enabled.")
    else:
        logs.DEBUG("Registry is not enabled. ")
def check_cors(self):
    """Audit the Hadoop web portal's CORS policy for wildcard origins."""
    logs.INFO("Checking web portal cross origin policy")
    cors_enabled = utils.get_item_from_obj(
        self.conf_obj, "hadoop.http.cross-origin.enabled",
        default="false") == "true"
    if not cors_enabled:
        logs.DEBUG("CORS is off")
        return
    # NOTE(review): default="true" here looks odd for an origin list —
    # Hadoop's documented default for allowed-origins is "*"; confirm intent.
    raw_origins = utils.get_item_from_obj(
        self.conf_obj, "hadoop.http.cross-origin.allowed-origins",
        default="true")
    origins = utils.split_ip(raw_origins)
    if "*" in origins:
        logs.ISSUE("Cross origin is wildcard.")
        logs.RECOMMENDATION(
            " / qualify hadoop.http.cross-origin.allowed-origins")
    else:
        logs.DEBUG(
            f"CORS is enabled but only allowed to {','.join(origins)}"
        )