def run_code(sources, pcap, version=BRO_VERSION):
    """Try to find a cached result for this submission.

    If a cached job exists for the same (sources, pcap, version) triple,
    refresh its TTLs and return it; otherwise submit a new "run_code"
    job to the worker.

    Returns a (job_id, stdout) tuple.
    """
    # Fall back to the default if the client asked for a version we
    # don't have workers for.
    if version not in BRO_VERSIONS:
        version = BRO_VERSION
    metrics.log_execution(version)
    # sort_keys=True makes the JSON serialization canonical so that
    # equivalent submissions always hash to the same cache key (plain
    # json.dumps leaves dict key order up to the interpreter's dict
    # ordering, which is not guaranteed stable in Python 2).
    cache_key = "cache:" + hashlib.sha1(
        json.dumps([sources, pcap, version], sort_keys=True)).hexdigest()
    job_id = r.get(cache_key)
    if job_id:
        metrics.log_cache_hit()
        # Refresh TTLs so frequently used entries stay cached.  The
        # per-job keys outlive the cache pointer by a few seconds so
        # they never disappear before it does.
        r.expire(cache_key, CACHE_EXPIRE)
        r.expire('stdout:%s' % job_id, CACHE_EXPIRE + 5)
        r.expire('files:%s' % job_id, CACHE_EXPIRE + 5)
        r.expire('sources:%s' % job_id, SOURCES_EXPIRE)
        return job_id, get_stdout(job_id)
    # Cache miss: allocate a fresh job id and hand the work to gearman.
    job_id = get_job_id()
    job_data = {
        "job_id": job_id,
        "sources": sources,
        "pcap": pcap,
        "version": version,
    }
    result = gm.get_client().submit_job("run_code", job_data)
    return job_id, result.result
def run_code_docker(sources, pcap=None, version=BRO_VERSION):
    # Run the submitted Bro sources inside a throwaway Docker container
    # and return the resulting *.log files as a {filename: text} dict.
    if version not in BRO_VERSIONS:
        # Unknown version requested: fall back to the default.
        version = BRO_VERSION
    # Normalize Windows line endings and ensure each source ends in
    # exactly one trailing newline.
    for s in sources:
        s['content'] = s['content'].replace("\r\n", "\n")
        s['content'] = s['content'].rstrip() + "\n"
    # Scratch directory, shared with the container via a bind mount.
    work_dir = tempfile.mkdtemp(dir="/brostuff")
    runbro_path = os.path.join(work_dir, "runbro")
    # Write each submitted source file into the work dir.
    for s in sources:
        code_fn = os.path.join(work_dir, s['name'])
        with codecs.open(code_fn, 'w', encoding="utf-8") as f:
            f.write(s['content'])
    # Prefer a version-specific runner script (e.g. "./runbro-2.4")
    # when one exists next to the worker, otherwise the generic one.
    runbro_src = "./runbro"
    runbro_src_version_specific = "%s-%s" % (runbro_src, version)
    if os.path.exists(runbro_src_version_specific):
        runbro_src = runbro_src_version_specific
    shutil.copy(runbro_src, runbro_path)
    os.chmod(runbro_path, 0755)  # make the runner executable
    # Bind-mount the work dir read-write at the same path in-container.
    binds = {work_dir: {"bind": work_dir, "mode": "rw"}}
    if pcap:
        dst = os.path.join(work_dir, "file.pcap")
        if '.' in pcap:
            # A dot suggests a filename: copy one of the bundled
            # sample pcaps into the work dir.
            src = os.path.join(os.getcwd(), "static/pcaps", pcap)
            #FIXME: Work out a better way to share pcaps around
            #binds[src]={"bind": dst, "ro": True}
            shutil.copy(src, dst)
        else:
            # Otherwise treat it as the id of an uploaded pcap and
            # fetch its contents (best effort: skip if unavailable).
            contents = get_pcap_with_retry(pcap)
            if contents:
                with open(dst, 'w') as f:
                    f.write(contents)
    #docker run -v /brostuff/tmpWh0k1x:/brostuff/ -n --rm -t -i bro_worker /bro/bin/bro /brostuff/code.bro
    print "Connecting to docker...."
    # Serialize access to the docker daemon across workers via a
    # redis-backed lock.
    with r.lock("docker", 5) as lck:
        c = docker.Client()
        print "Creating Bro %s container.." % version
        # Locked-down container: no network, read-only root filesystem,
        # 128 MB memory cap, DNS pointed at localhost.
        host_config = docker.utils.create_host_config(
            binds=binds,
            dns=["127.0.0.1"],
            mem_limit="128m",
            network_mode="none",
            read_only=True,
        )
        container = c.create_container(
            'broplatform/bro:' + version,
            working_dir=work_dir,
            command=runbro_path,
            host_config=host_config,
            volumes=[work_dir],
        )
        print "Starting container.."
        try:
            c.start(container)
        except Exception, e:
            # Startup failed: remove the scratch dir, queue a
            # background job to remove the container, and re-raise.
            shutil.rmtree(work_dir)
            gm.get_client().submit_job("remove_container", {"container": container}, background=True)
            raise
        # NOTE(review): the fragment below duplicates the tail of the
        # code above (the close of create_container plus the
        # start/except block).  It appears to be an overlapping
        # extraction artifact in this chunked source and is preserved
        # verbatim — confirm against the canonical file.
        )
        print "Starting container.."
        try:
            c.start(container)
        except Exception, e:
            shutil.rmtree(work_dir)
            gm.get_client().submit_job("remove_container", {"container": container}, background=True)
            raise
        print "Waiting.."
        # Block until the container's command has finished.
        c.wait(container)
    print "Removing Container"
    # Container removal is offloaded to a background gearman job.
    gm.get_client().submit_job("remove_container", {"container": container}, background=True)
    # Collect the Bro log files the run produced.
    files = {}
    for f in os.listdir(work_dir):
        if not f.endswith(".log"):
            continue
        full = os.path.join(work_dir, f)
        txt = read_fn(full)
        # Keep non-empty logs; stdout is kept even when empty.
        if txt.strip() or 'stdout' in f:
            files[f] = txt
    shutil.rmtree(work_dir)
    return files


def run_code(gearman_worker, gearman_job):
    # Gearman worker entry point; the body continues beyond this chunk.
    # job_id, sources, pcap, version
    print "run_code", gearman_job.data
import time import json import hashlib import traceback from redis import Redis import bro_ascii_reader import gm import metrics CACHE_EXPIRE = 60 * 10 SOURCES_EXPIRE = 60 * 60 * 24 * 30 BRO_VERSIONS = gm.get_client().submit_job("get_bro_versions", {}).result #Set the default bro version to the most recent version, unless that is master BRO_VERSION = BRO_VERSIONS[-1] if BRO_VERSION == 'master' and len(BRO_VERSION) > 1: BRO_VERSION = BRO_VERSIONS[-2] print "Available Bro versions %r. Using %r as default" % (BRO_VERSIONS, BRO_VERSION) r = Redis(host="redis") def get_job_id(): return str(r.incr("trybro:id")) def run_code(sources, pcap, version=BRO_VERSION):