def mock_request_batch(request, raw_path):
    """Handle a request batch.

    Parses the posted ``batch`` list, builds a minimal mock request object
    for each item, and dispatches it to ``mock_request_processor``.  Earlier
    responses are fed to ``batch_substitute`` so later items can reference
    values (e.g. IDs) produced by previous items.

    Returns a dict: ``{"responses": [...], "success": True}``.
    """
    # Strip the batch-specific suffix from the request path to recover the
    # API root that the individual item URLs are relative to.
    base_api_url = request.path.replace(raw_path, "")
    batch_items = json_parse(request.raw_post_data)
    responses = []
    for item in batch_items["batch"]:
        # Mock a request object for each batch item.
        batch_request = SimpleObject()
        batch_request.method = item["type"]
        if "data" in item:
            batch_request.raw_post_data = batch_substitute(item["data"], responses)
        else:
            batch_request.raw_post_data = ""
        base_url = batch_substitute(item["url"], responses)
        print("Batch item: " + item["type"] + " " + base_url + " (" + item["url"] + ")")
        batch_request.user = request.user
        batch_request.path_info = force_unicode(base_url)
        batch_item_path = base_url.replace(base_api_url, "")
        # Split off any query string and expose it as a flat GET dict;
        # multi-valued parameters are joined with commas.
        batch_request.GET = {}
        query_delim_loc = batch_item_path.find('?')
        if query_delim_loc > -1:
            query_split = urlparse.parse_qs(batch_item_path[query_delim_loc + 1:])
            batch_item_path = batch_item_path[:query_delim_loc]
            for key, values in query_split.items():
                batch_request.GET[key] = ','.join(values)
        responses.append(mock_request_processor(batch_request, batch_item_path))
    return {"responses": responses, "success": True}
def mock_post(request, raw_path):
    """Either update an individual item or create a new item.

    POST to an item URL overlays the posted fields onto the existing item
    (or stores the payload as a new item under that id).  POST to a
    collection URL allocates the next sequential id and stores the payload
    under it, substituting the ``___id___`` placeholder with the new id.
    """
    rp = get_request_properties(request, raw_path)
    collection = get_collection(rp)
    if rp.item_id is not None:
        # It's an item: overlay the posted values onto any existing item.
        posted = json_parse(request.raw_post_data)
        if rp.item_id in collection["items"]:
            collection["items"][rp.item_id].update(posted)
        else:
            collection["items"][rp.item_id] = posted
        return status_message(True, "Item {} saved to {}.".format(rp.item_id, rp.key))
    # It's a collection: create an item under the next sequential id.
    next_id = str(collection["next_id"])
    collection["next_id"] += 1
    # Replace the embedded id placeholder with the actual ID before parsing.
    item_json = request.raw_post_data.replace('___id___', next_id)
    collection["items"][next_id] = json_parse(item_json)
    return status_message(True, next_id)
def mock_put(request, raw_path):
    """Replace or create an individual item at a specific key.

    PUT is only defined for item URLs; a collection-level PUT returns a
    failure status message.
    """
    rp = get_request_properties(request, raw_path)
    collection = get_collection(rp)
    if rp.item_id is not None:
        # It's an item: overwrite it wholesale (PUT semantics).
        collection["items"][rp.item_id] = json_parse(request.raw_post_data)
        # NOTE(review): bumping next_id on a keyed PUT looks odd since no id
        # was allocated — presumably it keeps auto-generated ids ahead of
        # client-chosen numeric keys; confirm before changing.
        collection["next_id"] += 1
        return status_message(True, "Item {} saved to {}.".format(rp.item_id, rp.key))
    return status_message(False, "Operation undefined.")
def get_verified_data(jws, expected=None, session_token=None):
    """Verify a Kik JWS blob and return (username, hostname, data, session_token).

    Extracts the username/hostname claims from the JWS headers and the
    payload from part 1.  If ``expected`` is given, the payload must match
    it exactly.  A valid ``session_token`` matching the same user/host lets
    the expensive signature verification be skipped; otherwise the JWS is
    verified (except for whitelisted TEST_USERS) and a fresh Session is
    minted, best-effort.

    Raises ``Exception`` on payload mismatch or a non-debug "chrome user".
    """
    headers = json_parse(get_jws_part(jws, 0))
    raw_username = headers['kikUsr']
    username = raw_username.lower()
    # The credential domain claim is "host/path..."; only the host matters.
    hostname = headers['kikCrdDm'].split('/')[0].lower()
    payload = get_jws_part(jws, 1)
    if expected is not None and payload != expected:
        logging.info('jws, payload does not match expected value')
        raise Exception('payload does not match expected value')
    try:
        data = json_parse(payload)
    except Exception:
        # Payload is not required to be JSON; callers get None in that case.
        data = None
    try:
        session = ndb.Key(urlsafe=session_token).get()
    except Exception:
        # Missing/garbage token simply means no cached session.
        session = None
    if session is None or not isinstance(session, Session) \
            or session.username != username or session.hostname != hostname:
        session_token = None
        if username not in TEST_USERS:
            verify_jws(jws, raw_username, hostname, (headers.get('kikDbg') and DEBUG))
        elif not DEBUG:
            logging.info('jws, chrome user detected')
            raise Exception('chrome user detected')
        try:
            # Mint a new session so the next request can skip verification.
            session = Session(username=username, hostname=hostname)
            session.put()
            session_token = session.key.urlsafe()
        except Exception:
            # Session creation is best-effort; authentication already passed.
            pass
    return username, hostname, data, session_token
def initialize(self, *args, **kwargs):
    """Parse request parameters and authenticate the Kik JWS, best-effort.

    Merges query-string params and any JSON body into ``self.params``,
    then tries to verify the request's JWS (the body for POST/PUT/PATCH,
    the KIK_JWS header otherwise) and populate username/hostname/session
    attributes.  Verification failures are swallowed: handlers inspect the
    resulting attributes themselves.
    """
    value = super(BaseHandler, self).initialize(*args, **kwargs)
    try:
        self.body_params = json_parse(self.request.body)
    except Exception:
        # A non-JSON (or empty) body is normal for GET requests.
        self.body_params = {}
    self.params = {}
    self.params.update(self.request.params)
    self.params.update(self.body_params)
    try:
        session = self.request.headers.get(KIK_SESSION)
        if self.request.method in ('POST', 'PUT', 'PATCH'):
            # For writes the whole body is the JWS; no expected payload.
            jws = self.request.body
            payload = None
        else:
            # For reads the JWS arrives in a header and must sign the path.
            jws = self.request.headers[KIK_JWS]
            payload = self.request.path
        from lib.jws import get_verified_data
        self.username, self.hostname, self.auth_params, self.kik_session = \
            get_verified_data(jws, expected=payload, session_token=session)
    except Exception:
        # Unauthenticated requests proceed without identity attributes.
        pass
    return value
def main(args):
    """Package LLAP for deployment on Apache Slider.

    Reads the LlapServiceDriver-generated config from ``--input``, renders
    the Slider app package (metainfo.xml, appConfig.json, resources.json,
    run.sh, and a tarball of the input tree) into ``--output``, and zips it.
    """
    version = os.getenv("HIVE_VERSION")
    if not version:
        # Fall back to a date stamp, e.g. "01Jan2020".
        version = strftime("%d%b%Y", gmtime())
    home = os.getenv("HIVE_HOME")
    output = "llap-slider-%(version)s" % ({"version": version})
    parser = argparse.ArgumentParser()
    parser.add_argument("--instances", type=int, default=1)
    parser.add_argument("--output", default=output)
    parser.add_argument("--input", required=True)
    parser.add_argument("--args", default="")
    parser.add_argument("--name", default="llap0")
    parser.add_argument("--loglevel", default="INFO")
    parser.add_argument("--logger", default="RFA")
    parser.add_argument("--chaosmonkey", type=int, default=0)
    parser.add_argument("--slider-am-container-mb", type=int, default=1024)
    parser.add_argument("--slider-keytab-dir", default="")
    parser.add_argument("--slider-keytab", default="")
    parser.add_argument("--slider-principal", default="")
    parser.add_argument("--slider-default-keytab", dest='slider_default_keytab', action='store_true')
    parser.set_defaults(slider_default_keytab=False)
    parser.add_argument("--slider-placement", type=int, default=4)
    # Unneeded here for now: parser.add_argument("--hiveconf", action='append')
    #parser.add_argument("--size")
    parser.add_argument("--xmx")
    parser.add_argument("--cache")
    parser.add_argument("--executors")
    # parse_known_args: extra LlapServiceDriver options are tolerated.
    (args, unknown_args) = parser.parse_known_args(args)
    input = args.input
    output = args.output
    # AM heap: 80% of the container, but leave at most 1024 MB of headroom.
    slider_am_jvm_heapsize = max(args.slider_am_container_mb * 0.8,
                                 args.slider_am_container_mb - 1024)
    slider_keytab_dir = args.slider_keytab_dir
    slider_keytab = args.slider_keytab
    slider_principal = args.slider_principal
    # set the defaults only if the defaults are enabled
    if args.slider_default_keytab:
        if not slider_keytab_dir:
            slider_keytab_dir = ".slider/keytabs/llap"
        if not slider_keytab:
            slider_keytab = "llap.keytab"
        if not slider_principal:
            slider_principal = "[email protected]"
    if not input:
        print "Cannot find input files"
        sys.exit(1)
        return
    config = json_parse(open(join(input, "config.json")).read())
    java_home = config["java.home"]
    max_direct_memory = config["max_direct_memory"]
    daemon_args = args.args
    if long(max_direct_memory) > 0:
        daemon_args = " -XX:MaxDirectMemorySize=%s %s" % (max_direct_memory, daemon_args)
    resource = LlapResource(config)
    # 5% container failure every monkey_interval seconds
    monkey_percentage = 5 # 5%
    # Substitution values for the metainfo/appConfig/resources/runner templates.
    vars = {
        "home" : home,
        "version" : version,
        "instances" : args.instances,
        "heap" : resource.heap_size,
        "container.mb" : resource.container_size,
        "container.cores" : resource.container_cores,
        "hadoop_home" : os.getenv("HADOOP_HOME"),
        "java_home" : java_home,
        "name" : resource.clusterName,
        "daemon_args" : daemon_args,
        "daemon_loglevel" : args.loglevel,
        "daemon_logger" : args.logger,
        "queue.string" : resource.queueString,
        "monkey_interval" : args.chaosmonkey,
        "monkey_percentage" : monkey_percentage,
        "monkey_enabled" : args.chaosmonkey > 0,
        "slider.am.container.mb" : args.slider_am_container_mb,
        "slider_am_jvm_heapsize" : slider_am_jvm_heapsize,
        "slider_keytab_dir" : slider_keytab_dir,
        "slider_keytab" : slider_keytab,
        "slider_principal" : slider_principal,
        "placement" : args.slider_placement
    }
    if not exists(output):
        os.makedirs(output)
    # Refresh the bin/ scripts inside the input tree before packaging it.
    src = join(home, "scripts", "llap", "bin")
    dst = join(input, "bin")
    if exists(dst):
        shutil.rmtree(dst)
    shutil.copytree(src, dst)
    # Make the zip package
    tmp = join(output, "tmp")
    pkg = join(tmp, "package")
    src = join(home, "scripts", "llap", "slider")
    dst = join(pkg, "scripts")
    if exists(dst):
        shutil.rmtree(dst)
    shutil.copytree(src, dst)
    with open(join(tmp, "metainfo.xml"),"w") as f:
        f.write(metainfo % vars)
    os.mkdir(join(pkg, "files"))
    tarball = tarfile.open(join(pkg, "files", "llap-%s.tar.gz" % version), "w:gz")
    # recursive add + -C chdir inside
    tarball.add(input, "")
    tarball.close()
    zipped = zipfile.ZipFile(join(output, "llap-%s.zip" % version), "w")
    zipdir(tmp, zipped)
    zipped.close()
    # cleanup after making zip pkg
    shutil.rmtree(tmp)
    with open(join(output, "appConfig.json"), "w") as f:
        f.write(appConfig % vars)
    with open(join(output, "resources.json"), "w") as f:
        f.write(resources % vars)
    with open(join(output, "run.sh"), "w") as f:
        f.write(runner % vars)
    os.chmod(join(output, "run.sh"), 0700)
    print "Prepared %s/run.sh for running LLAP on Slider" % (output)
def main(args):
    """Package LLAP for deployment as a YARN service.

    Reads the LlapServiceDriver-generated config from ``--input`` and
    renders the YARN service artifacts (Yarnfile, run.sh, and a tarball of
    the input tree) into ``--output``.  May run as a child process of
    LlapServiceDriver (``--javaChild``).
    """
    version = os.getenv("HIVE_VERSION")
    if not version:
        # Fall back to a date stamp, e.g. "01Jan2020".
        version = strftime("%d%b%Y", gmtime())
    home = os.getenv("HIVE_HOME")
    output = "llap-yarn-%(version)s" % ({"version": version})
    parser = argparse.ArgumentParser()
    parser.add_argument("--instances", type=int, default=1)
    parser.add_argument("--output", default=output)
    parser.add_argument("--input", required=True)
    parser.add_argument("--args", default="")
    parser.add_argument("--name", default="llap0")
    parser.add_argument("--loglevel", default="INFO")
    parser.add_argument("--logger", default="query-routing")
    parser.add_argument("--service-am-container-mb", type=int, default=1024)
    parser.add_argument("--service-appconfig-global", nargs='*',
                        type=service_appconfig_global_property, action='append')
    parser.add_argument("--service-keytab-dir", default="")
    parser.add_argument("--service-keytab", default="")
    parser.add_argument("--service-principal", default="")
    parser.add_argument("--service-default-keytab", dest='service_default_keytab', action='store_true')
    parser.add_argument("--service-placement", type=int, default=4)
    parser.add_argument("--health-percent", type=int, default=80)
    parser.add_argument("--health-time-window-secs", type=int, default=300)
    parser.add_argument("--health-init-delay-secs", type=int, default=400)
    parser.set_defaults(service_default_keytab=False)
    parser.add_argument("--startImmediately", dest='start_immediately', action='store_true')
    parser.add_argument("--javaChild", dest='java_child', action='store_true')
    parser.set_defaults(start_immediately=False)
    parser.set_defaults(java_child=False)
    # Unneeded here for now: parser.add_argument("--hiveconf", action='append')
    #parser.add_argument("--size")
    parser.add_argument("--xmx")
    parser.add_argument("--cache")
    parser.add_argument("--executors")
    # parse_known_args: extra LlapServiceDriver options are tolerated.
    (args, unknown_args) = parser.parse_known_args(args)
    # --startImmediately without --javaChild: nothing to package here.
    if args.start_immediately and not args.java_child:
        sys.exit(0)
        return
    if args.java_child:
        print "%s Running as a child of LlapServiceDriver" % (strftime("%H:%M:%S", gmtime()))
    else:
        print "%s Running after LlapServiceDriver" % (strftime("%H:%M:%S", gmtime()))
    input = args.input
    output = args.output
    # AM heap: 80% of the container, but leave at most 1024 MB of headroom.
    service_am_jvm_heapsize = max(args.service_am_container_mb * 0.8,
                                  args.service_am_container_mb - 1024)
    service_keytab_dir = args.service_keytab_dir
    service_keytab = args.service_keytab
    service_principal = args.service_principal
    # set the defaults only if the defaults are enabled
    if args.service_default_keytab:
        if not service_keytab_dir:
            service_keytab_dir = ".yarn/keytabs/llap"
        if not service_keytab:
            service_keytab = "llap.keytab"
        if not service_principal:
            service_principal = "[email protected]"
    # Assemble dir/file into one HDFS path for the keytab, when configured.
    service_keytab_path = service_keytab_dir
    if service_keytab_path:
        if service_keytab:
            service_keytab_path += "/" + service_keytab
    else:
        service_keytab_path = service_keytab
    if service_keytab_path:
        service_keytab_path = "hdfs:///user/hive/" + service_keytab_path
    if not input:
        print "Cannot find input files"
        sys.exit(1)
        return
    config = json_parse(open(join(input, "config.json")).read())
    java_home = config["java.home"]
    max_direct_memory = config["max_direct_memory"]
    resource = LlapResource(config)
    daemon_args = args.args
    if long(max_direct_memory) > 0:
        daemon_args = " -XX:MaxDirectMemorySize=%s %s" % (max_direct_memory, daemon_args)
    # One HTTP connection per peer daemon/executor, plus one spare.
    daemon_args = " -Dhttp.maxConnections=%s %s" % ((max(args.instances, resource.executors) + 1), daemon_args)
    # Substitution values for the Yarnfile/runner templates.
    vars = {
        "home" : home,
        "version" : version,
        "instances" : args.instances,
        "heap" : resource.heap_size,
        "container.mb" : resource.container_size,
        "container.cores" : resource.container_cores,
        "hadoop_home" : os.getenv("HADOOP_HOME"),
        "java_home" : java_home,
        "name" : resource.clusterName,
        "daemon_args" : daemon_args,
        "daemon_loglevel" : args.loglevel,
        "daemon_logger" : args.logger,
        "queue.string" : resource.queueString,
        "service.am.container.mb" : args.service_am_container_mb,
        "service_appconfig_global_append": construct_service_site_global_string(args.service_appconfig_global),
        "service_am_jvm_heapsize" : service_am_jvm_heapsize,
        "service_keytab_path" : service_keytab_path,
        "service_principal" : service_principal,
        "placement" : args.service_placement,
        "health_percent": args.health_percent,
        "health_time_window": args.health_time_window_secs,
        "health_init_delay": args.health_init_delay_secs
    }
    if not exists(output):
        os.makedirs(output)
    # Refresh the bin/ scripts inside the input tree before packaging it.
    src = join(home, "scripts", "llap", "bin")
    dst = join(input, "bin")
    if exists(dst):
        shutil.rmtree(dst)
    shutil.copytree(src, dst)
    # Make the llap tarball
    print "%s Prepared the files" % (strftime("%H:%M:%S", gmtime()))
    tarball = tarfile.open(join(output, "llap-%s.tar.gz" % version), "w:gz")
    # recursive add + -C chdir inside
    tarball.add(input, "")
    tarball.close()
    print "%s Packaged the files" % (strftime("%H:%M:%S", gmtime()))
    with open(join(output, "Yarnfile"), "w") as f:
        f.write(yarnfile % vars)
    with open(join(output, "run.sh"), "w") as f:
        f.write(runner % vars)
    os.chmod(join(output, "run.sh"), 0700)
    if not args.java_child:
        print "%s Prepared %s/run.sh for running LLAP on YARN" % (strftime("%H:%M:%S", gmtime()), output)
def main(args): version = os.getenv("HIVE_VERSION") if not version: version = strftime("%d%b%Y", gmtime()) home = os.getenv("HIVE_HOME") output = "llap-slider-%(version)s" % ({"version": version}) parser = argparse.ArgumentParser() parser.add_argument("--instances", type=int, default=1) parser.add_argument("--output", default=output) parser.add_argument("--input", required=True) parser.add_argument("--args", default="") parser.add_argument("--name", default="llap0") parser.add_argument("--loglevel", default="INFO") parser.add_argument("--chaosmonkey", type=int, default="0") # Unneeded here for now: parser.add_argument("--hiveconf", action='append') #parser.add_argument("--size") parser.add_argument("--xmx") parser.add_argument("--cache") parser.add_argument("--executors") (args, unknown_args) = parser.parse_known_args(args) input = args.input output = args.output if not input: print "Cannot find input files" sys.exit(1) return config = json_parse(open(join(input, "config.json")).read()) resource = LlapResource(config) # 5% container failure every monkey_interval seconds monkey_percentage = 5 # 5% vars = { "home" : home, "version" : version, "instances" : args.instances, "heap" : resource.heap_size, "container.mb" : resource.container_size, "container.cores" : resource.container_cores, "hadoop_home" : os.getenv("HADOOP_HOME"), "java_home" : os.getenv("JAVA_HOME"), "name" : args.name, "daemon_args" : args.args, "daemon_loglevel" : args.loglevel, "monkey_interval" : args.chaosmonkey, "monkey_percentage" : monkey_percentage, "monkey_enabled" : args.chaosmonkey > 0 } if not exists(output): os.makedirs(output) src = join(home, "scripts", "llap", "bin") dst = join(input, "bin") if exists(dst): shutil.rmtree(dst) shutil.copytree(src, dst) # Make the zip package tmp = join(output, "tmp") pkg = join(tmp, "package") src = join(home, "scripts", "llap", "slider") dst = join(pkg, "scripts") if exists(dst): shutil.rmtree(dst) shutil.copytree(src, dst) with open(join(tmp, 
"metainfo.xml"),"w") as f: f.write(metainfo % vars) os.mkdir(join(pkg, "files")) tarball = tarfile.open(join(pkg, "files", "llap-%s.tar.gz" % version), "w:gz") # recursive add + -C chdir inside tarball.add(input, "") tarball.close() zipped = zipfile.ZipFile(join(output, "llap-%s.zip" % version), "w") zipdir(tmp, zipped) zipped.close() # cleanup after making zip pkg shutil.rmtree(tmp) with open(join(output, "appConfig.json"), "w") as f: f.write(appConfig % vars) with open(join(output, "resources.json"), "w") as f: f.write(resources % vars) with open(join(output, "run.sh"), "w") as f: f.write(runner % vars) os.chmod(join(output, "run.sh"), 0700) print "Prepared %s/run.sh for running LLAP on Slider" % (output)
def main(args): opts, args = getopt( args, "", [ "instances=", "output=", "input=", "args=", "name=", "loglevel=", "chaosmonkey=", "size=", "xmx=", "cache=", "executors=", "hiveconf=", ], ) version = os.getenv("HIVE_VERSION") if not version: version = strftime("%d%b%Y", gmtime()) home = os.getenv("HIVE_HOME") output = "llap-slider-%(version)s" % ({"version": version}) instances = 1 name = "llap0" d_args = "" d_loglevel = "INFO" input = None monkey = "0" for k, v in opts: if k in ("--input"): input = v elif k in ("--output"): output = v elif k in ("--instances"): instances = int(v) elif k in ("--name"): name = v elif k in ("--args"): d_args = v elif k in ("--loglevel"): d_loglevel = v elif k in ("--chaosmonkey"): monkey = v if not input: print "Cannot find input files" sys.exit(1) return config = json_parse(open(join(input, "config.json")).read()) resource = LlapResource(config) monkey_interval = int(monkey) # 5% container failure every monkey_interval seconds monkey_percentage = 5 # 5% vars = { "home": home, "version": version, "instances": instances, "heap": resource.heap_size, "container.mb": resource.container_size, "container.cores": resource.container_cores, "hadoop_home": os.getenv("HADOOP_HOME"), "java_home": os.getenv("JAVA_HOME"), "name": name, "daemon_args": d_args, "daemon_loglevel": d_loglevel, "monkey_interval": monkey_interval, "monkey_percentage": monkey_percentage, "monkey_enabled": monkey_interval > 0, } if not exists(output): os.makedirs(output) src = join(home, "scripts", "llap", "bin") dst = join(input, "bin") if exists(dst): shutil.rmtree(dst) shutil.copytree(src, dst) # Make the zip package tmp = join(output, "tmp") pkg = join(tmp, "package") src = join(home, "scripts", "llap", "slider") dst = join(pkg, "scripts") if exists(dst): shutil.rmtree(dst) shutil.copytree(src, dst) with open(join(tmp, "metainfo.xml"), "w") as f: f.write(metainfo % vars) os.mkdir(join(pkg, "files")) tarball = tarfile.open(join(pkg, "files", "llap-%s.tar.gz" % version), 
"w:gz") # recursive add + -C chdir inside tarball.add(input, "") tarball.close() zipped = zipfile.ZipFile(join(output, "llap-%s.zip" % version), "w") zipdir(tmp, zipped) zipped.close() # cleanup after making zip pkg shutil.rmtree(tmp) with open(join(output, "appConfig.json"), "w") as f: f.write(appConfig % vars) with open(join(output, "resources.json"), "w") as f: f.write(resources % vars) with open(join(output, "run.sh"), "w") as f: f.write(runner % vars) os.chmod(join(output, "run.sh"), 0700) print "Prepared %s/run.sh for running LLAP on Slider" % (output)