Example #1
    def test_queue_control(self):
        print "checking if queues are consistent"
        queue = json_parse(request("GET", "/api/queue/", self.client_auth))
        self.assertEqual(
            len(queue),
            int(request("GET", "/api/queue/length/", self.client_auth)))
        for i in range(len(queue)):
            self.assertEqual(
                json_encode(queue[i]),
                request("GET", "/api/queue/" + str(i), self.client_auth))

        print "adding to queue"
        video_to_add = {
            "service": "youtube",
            "type": "video",
            "id": "oiubwd",
            "timestamp": "0"
        }
        request("POST", "/api/queue/", self.control_auth,
                json_encode(video_to_add))
        self.assertEqual(
            len(queue) + 1,
            int(request("GET", "/api/queue/length/", self.client_auth)))
        self.assertEqual(
            json_encode(video_to_add),
            request("GET", "/api/queue/" + str(len(queue)), self.client_auth))
Example #2
 def add_video_to_queue(self):
     if not self.require_control_auth():
         return
     try:
         video = json_parse(self.POST_data)
         if (type(video) is not dict or "service" not in video
                 or "id" not in video or "type" not in video):
             raise
     except:
         self.send_error(
             400,
             "The submitted data should be a JSON dictionary. See the API definition for the required content."
         )
         return
     if "timestamp" not in video:
         video["timestamp"] = 0
     for key in video:
         if key not in ["service", "id", "type", "timestamp"]:
             self.send_error(
                 400,
                 "'" + key + "' is an invalid attribute for a video object")
             return
     self.server.queue.append(video)
     self.send_response(204)
     print "Added Video to Queue", video
Example #3
def mock_request_batch(request, raw_path):
    """Handle a request batch."""
    base_api_url = string.replace(request.path, raw_path, "")
    batch_items = json_parse(request.raw_post_data)
    responses = []
    for item in batch_items["batch"]:
        batch_request = SimpleObject() #Mock a request object for each batch item.
        batch_request.method = item["type"]
        if "data" in item:
            batch_request.raw_post_data = batch_substitute(item["data"], responses)
        else:
            batch_request.raw_post_data = ""
            
        base_url = batch_substitute(item["url"], responses)
        print "Batch item: " + item["type"] + " " + base_url + " (" + item["url"] + ")"
        
        batch_request.user = request.user
        batch_request.path_info = force_unicode(base_url)
        batch_item_path = string.replace(base_url, base_api_url, "")
        
        batch_request.GET = {}
        
        query_delim_loc = batch_item_path.find('?')
        if query_delim_loc > -1:
            query_start = query_delim_loc + 1
            query_split = urlparse.parse_qs(batch_item_path[query_start:])
            batch_item_path = batch_item_path[:query_delim_loc]
            
            for k in query_split.keys():
                batch_request.GET[k] = ','.join(query_split[k])
            
        
        responses.append(mock_request_processor(batch_request, batch_item_path))
    return {"responses":responses, "success":True}
Example #4
    def test_playback_control(self):
        request("GET", "/api/playback/state", self.client_auth)
        request("POST", "/api/playback/state", self.control_auth,
                "{\"paused\":true}")

        self.assertFalse("paused" in json_parse(
            request("GET", "/api/playback/events", self.client_auth)))

        request("POST", "/api/playback/state", self.control_auth,
                "{\"paused\":false}")

        self.assertFalse(
            json_parse(request("GET", "/api/playback/state",
                               self.client_auth))["paused"])
        self.assertFalse(
            json_parse(request("GET", "/api/playback/events",
                               self.client_auth))["paused"])
Example #5
def mock_post(request, raw_path):
    """Either update an individual item or create a new item."""
    rp = get_request_properties(request, raw_path)
    collection = get_collection(rp)
    
    if rp.item_id != None: #It's an item, overlay passed values.
        if rp.item_id in collection["items"]:
            collection["items"][rp.item_id].update(json_parse(request.raw_post_data))
        else:
            collection["items"][rp.item_id] = json_parse(request.raw_post_data)
        return status_message(True, "Item {} saved to {}.".format(rp.item_id, rp.key))
    
    else: #It's a collection, create item
        next_id = str(collection["next_id"]) 
        collection["next_id"] += 1
        item_json = request.raw_post_data.replace('___id___', next_id) #Replace the embedded id placeholder with the actual ID.
        collection["items"][next_id] = json_parse(item_json)
        return status_message(True, next_id)
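A small illustration of the ___id___ placeholder handling in the collection branch above, using plain string replacement exactly as the handler does (the payload shape itself is made up):

raw_post_data = '{"name": "demo", "self_ref": "/items/___id___"}'
next_id = "7"  # in the handler this comes from collection["next_id"]
item_json = raw_post_data.replace('___id___', next_id)
# item_json == '{"name": "demo", "self_ref": "/items/7"}', which is then parsed and stored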
Example #6
  def data(self, data):
    clone = self.clone()

    clone._data = data

    json_data = json_parse(data.to_json(orient='records'))
    clone._schema = data2schema(json_data)

    clone._fields = list(self._schema['stats'].keys()) 
    return clone
Example #7
  def __init__(self, data):
    self._data = data

    json_data = json_parse(data.to_json(orient='records'))
    self._schema = data2schema(json_data)
    self._encodings = []

    self._fields = list(self._schema['stats'].keys())
    self._anchor = None
    self._sol = None
    self._solved = False
Example #8
 def test_client_migrate(self):
     request("GET", "/api/auth/client")
     playback_delta = json_parse(
         request("GET", "/api/playback/events", self.client_auth))
     self.assertTrue("newClientRequested" in playback_delta)
     self.assertTrue(playback_delta["newClientRequested"])
     request("DELETE", "/api/auth/client", self.client_auth)
     new_client_key = request("GET", "/api/auth/client")
     self.assertEqual(len(new_client_key), 32)
     self.assertEqual(
         request("GET", "/api/auth/verify", "Client:" + new_client_key),
         "client")
Example #9
    def assert_logger_default_input():
        file = open('res/event_aws.txt')
        assert file is not None

        event = json_parse(file.read())
        assert type(event) is dict
        assert type(event['requestContext']) is dict
        assert event['requestContext']['stage'] == 'qa'
        assert event['requestContext'][
            'requestId'] == '9a294c37-76d2-4ace-b340-0b9c59440d12'

        return event
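From the assertions above, the smallest res/event_aws.txt that would pass looks roughly like the structure below; a real API Gateway event carries many more fields, this is only the subset the helper checks.

minimal_event = {
    "requestContext": {
        "stage": "qa",
        "requestId": "9a294c37-76d2-4ace-b340-0b9c59440d12"
    }
}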
Example #10
def mock_put(request, raw_path):
    """Replace or create an individual item at a specific key."""
    rp = get_request_properties(request, raw_path)
    collection = get_collection(rp)
    
    if rp.item_id != None: #It's an item, overwrite it
        collection["items"][rp.item_id] = json_parse(request.raw_post_data)
        collection["next_id"] += 1
        
        return status_message(True, "Item {} saved to {}.".format(rp.item_id, rp.key))
        
    return status_message(False, "Operation undefined.")
Example #11
def get_verified_data(jws, expected=None, session_token=None):
    headers = json_parse(get_jws_part(jws, 0))
    raw_username = headers['kikUsr']
    username = raw_username.lower()
    hostname = headers['kikCrdDm'].split('/')[0].lower()
    payload = get_jws_part(jws, 1)

    if expected is not None and payload != expected:
        logging.info('jws, payload does not match expected value')
        raise Exception('payload does not match expected value')

    try:
        data = json_parse(payload)
    except:
        data = None

    try:
        session = ndb.Key(urlsafe=session_token).get()
    except Exception as e:
        session = None
    if (session is None or not isinstance(session, Session)
            or session.username != username or session.hostname != hostname):
        session = None
        session_token = None
        if username not in TEST_USERS:
            verify_jws(jws, raw_username, hostname,
                       (headers.get('kikDbg') and DEBUG))
        elif not DEBUG:
            logging.info('jws, chrome user detected')
            raise Exception('chrome user detected')
        try:
            session = Session(username=username, hostname=hostname)
            session.put()
            session_token = session.key.urlsafe()
        except:
            pass

    return username, hostname, data, session_token
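get_verified_data leans on a get_jws_part helper that is not included in the listing. Assuming the token is a standard three-part compact JWS (base64url segments separated by dots), one plausible sketch is:

import base64

def get_jws_part(jws, index):
    # Return the decoded text of one segment of a compact JWS.
    # Sketch only: the original helper may decode or validate differently.
    segment = jws.split('.')[index]
    padding = '=' * (-len(segment) % 4)  # restore the stripped base64 padding
    return base64.urlsafe_b64decode(segment + padding).decode('utf-8')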
Example #12
def argo_http(api, param=None, session=None, fromhost=None):
    '''Make an HTTP POST request to `ARGO_PREFIX` + api.

    `session` should be a dict (or anything with an .update method);
    it is used to keep session data between calls.'''

    # merge args
    if session:
        args = dict(session)
    else:
        args = {}
    if param:
        args.update(param)
    if fromhost:
        args['fromhost'] = fromhost

    # parse data
    text = urlopen(ARGO_PREFIX + api, urlencode(args)).read()

    if not text:
        raise Exception('Empty response url[%s] params[%s]' %
                        ((ARGO_PREFIX + api), urlencode(args)))

    try:
        text = text.decode('utf-8')
    except UnicodeDecodeError as e:
        logger.warning("Cannot decode %r" % text)
        raise e

    #TODO try to catch unicode decode error
    open('rep.txt', 'wb').write(
        ('%s%s?%s\n\n%s' % (ARGO_PREFIX.encode('utf8'), api, urlencode(args),
                            text)).encode('utf8'))

    if text[-1] == '!':
        seq = text.find('\n')
        session.update(json_parse(text[seq + 1:-1]))
        return json_parse(text[0:seq])
    return json_parse(text)
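A usage sketch for argo_http: the caller keeps a plain dict as the session and passes it back on every call, so any trailing session block in a response ending with '!' is merged into it. The API names and parameters below are hypothetical.

session = {}  # reused across calls to carry session data
login = argo_http('login', param={'user': 'alice', 'pwd': 'secret'},
                  session=session, fromhost='198.51.100.7')
# if the response ended with '!', `session` now holds the server-sent session fields
profile = argo_http('profile', session=session)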
Example #13
def increase_funding_limit_request(request):
    user = request.user if request.user.is_authenticated else None
    profile = request.user.profile if user and hasattr(request.user,
                                                       'profile') else None
    usdt_per_tx = request.GET.get('usdt_per_tx', None)
    usdt_per_week = request.GET.get('usdt_per_week', None)
    is_staff = user.is_staff if user else False

    if is_staff and usdt_per_tx and usdt_per_week:
        try:
            profile_pk = request.GET.get('profile_pk', None)
            target_profile = Profile.objects.get(pk=profile_pk)
            target_profile.max_tip_amount_usdt_per_tx = usdt_per_tx
            target_profile.max_tip_amount_usdt_per_week = usdt_per_week
            target_profile.save()
        except Exception as e:
            return JsonResponse({'error': str(e)}, status=400)

        return JsonResponse({'msg': _('Success')}, status=200)

    if request.body:
        if not user or not profile or not profile.handle:
            return JsonResponse(
                {'error': _('You must be Authenticated via Github to use this feature!')},
                status=401)

        try:
            result = FundingLimitIncreaseRequestForm(json_parse(request.body))
            if not result.is_valid():
                raise
        except Exception as e:
            return JsonResponse({'error': _('Invalid JSON.')}, status=400)

        new_funding_limit_increase_request(profile, result.cleaned_data)

        return JsonResponse({'msg': _('Request received.')}, status=200)

    form = FundingLimitIncreaseRequestForm()
    params = {
        'form': form,
        'title': _('Request a Funding Limit Increase'),
        'card_title': _('Gitcoin - Request a Funding Limit Increase'),
        'card_desc': _('Do you hit the Funding Limit? Request a increasement!')
    }

    return TemplateResponse(request,
                            'increase_funding_limit_request_form.html', params)
Example #14
def get_verified_data(jws, expected=None, session_token=None):
    headers = json_parse(get_jws_part(jws, 0))
    raw_username = headers['kikUsr']
    username = raw_username.lower()
    hostname = headers['kikCrdDm'].split('/')[0].lower()
    payload = get_jws_part(jws, 1)

    if expected is not None and payload != expected:
        logging.info('jws, payload does not match expected value')
        raise Exception('payload does not match expected value')

    try:
        data = json_parse(payload)
    except:
        data = None

    try:
        session = ndb.Key(urlsafe=session_token).get()
    except Exception:
        session = None
    if session is None or not isinstance(session, Session) \
            or session.username != username or session.hostname != hostname:
        session_token = None
        if username not in TEST_USERS:
            verify_jws(jws, raw_username, hostname, (headers.get('kikDbg') and DEBUG))
        elif not DEBUG:
            logging.info('jws, chrome user detected')
            raise Exception('chrome user detected')
        try:
            session = Session(username=username, hostname=hostname)
            session.put()
            session_token = session.key.urlsafe()
        except:
            pass

    return username, hostname, data, session_token
Example #15
def __update_user(request, user_id):
    response = None

    try:
        userData = {
            k: v
            for k, v in json_parse(request.body.decode()).items() if v
        }
        userForm = UserRegistryUpdateForm(userData, None)
        fields = userForm.Meta.fields
        user = UserRegistry.objects.only(*fields).get(
            id=user_id) if userForm.is_valid() else None

        if (user):
            for field in fields:
                setattr(user, field, userForm.cleaned_data[field])

            user.save(update_fields=fields)

            response = {
                'result': ResponseResult.successful,
                'message': ResponseMessage.updated_user,
                'data': {
                    'user': user_id
                }
            }
        else:
            response = {
                'result': ResponseResult.failed,
                'message': ResponseMessage.failed_to_update_user,
                'data': {
                    'user': user_id
                }
            }
    except:
        response = {
            'result': ResponseResult.error,
            'message': ResponseMessage.error_updating_user,
            'data': {
                'user': user_id
            }
        }

    return response
Example #16
    def assert_response(response):

        assert 'statusCode' in response
        assert type(response['statusCode']) is int

        assert 'headers' in response
        assert type(response['headers']) is dict

        assert 'body' in response
        assert type(response['body']) is str

        headers = dict((h.lower(), v) for h, v in response['headers'].items())
        assert headers['content-type'] == 'application/json'

        body = json_parse(response['body'])
        assert 'r' in body
        assert type(body['r']) is str

        return body
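For context, a response object that satisfies every assertion in assert_response (the values are illustrative):

example_response = {
    "statusCode": 200,
    "headers": {"Content-Type": "application/json"},
    "body": '{"r": "ok"}'
}
# assert_response(example_response) would return {"r": "ok"}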
Example #17
 def __init__(self):
     HTTPServer.__init__(self, ('', SERVER_PORT), WAYWRequestHandler)
     create_file = True
     if (os.path.isfile(QUEUE_FILE_PATH)):
         f = open(QUEUE_FILE_PATH, "r")
         try:
             self.queue = json_parse(f.read())
         except:
             self.queue = None
         f.close()
         if type(self.queue) is not type([]):
             print "Data present in queue file is bad."
         else:
             create_file = False
     if create_file:
         f = open(QUEUE_FILE_PATH, "w")
         f.write("[]")
         f.close()
         self.queue = []
Example #18
    def apply_playback_state(self):
        if not self.require_control_auth():
            return
        try:
            playback_state_delta = json_parse(self.POST_data)
            if type(playback_state_delta) is not type({}):
                raise
        except:
            self.send_error(400,
                            "The submitted data should be a JSON dictionary")
            return
        for key in playback_state_delta:
            if key in self.server.playback_state and key != "newClientRequested":
                if self.server.playback_state[key] != playback_state_delta[key]:
                    self.server.playback_state_delta[key] = playback_state_delta[key]
                self.server.playback_state[key] = playback_state_delta[key]

        self.send_response(204)
Example #19
	def initialize(self, *args, **kwargs):
		value = super(BaseHandler, self).initialize(*args, **kwargs)
		try:
			self.body_params = json_parse(self.request.body)
		except:
			self.body_params = {}
		self.params = {}
		self.params.update(self.request.params)
		self.params.update(self.body_params)
		try:
			session = self.request.headers.get(KIK_SESSION)
			if self.request.method in ('POST', 'PUT', 'PATCH'):
				jws = self.request.body
				payload = None
			else:
				jws = self.request.headers[KIK_JWS]
				payload = self.request.path
			from lib.jws import get_verified_data
			self.username, self.hostname, self.auth_params, self.kik_session = get_verified_data(jws, expected=payload, session_token=session)
		except:
			pass
		return value
Example #20
 def initialize(self, *args, **kwargs):
     value = super(BaseHandler, self).initialize(*args, **kwargs)
     try:
         self.body_params = json_parse(self.request.body)
     except:
         self.body_params = {}
     self.params = {}
     self.params.update(self.request.params)
     self.params.update(self.body_params)
     try:
         session = self.request.headers.get(KIK_SESSION)
         if self.request.method in ('POST', 'PUT', 'PATCH'):
             jws = self.request.body
             payload = None
         else:
             jws = self.request.headers[KIK_JWS]
             payload = self.request.path
         from lib.jws import get_verified_data
         self.username, self.hostname, self.auth_params, self.kik_session = get_verified_data(
             jws, expected=payload, session_token=session)
     except:
         pass
     return value
Example #21
	slider_principal = args.slider_principal
	# set the defaults only if the defaults are enabled
	if args.slider_default_keytab:
		if not slider_keytab_dir:
			slider_keytab_dir = ".slider/keytabs/llap"
		if not slider_keytab:
			slider_keytab = "llap.keytab"
		if not slider_principal:
			slider_principal = "*****@*****.**"
	if not input:
		print "Cannot find input files"
		sys.exit(1)
		return
	remote_dir = args.remote_dir

	config = json_parse(open(join(input, "config.json")).read())
	java_home = config["java.home"]
	max_direct_memory = config["max_direct_memory"]

	resource = LlapResource(config)

	daemon_args = args.args
	if long(max_direct_memory) > 0:
		daemon_args = " -XX:MaxDirectMemorySize=%s %s" % (max_direct_memory, daemon_args)
	daemon_args = " -Dhttp.maxConnections=%s %s" % ((max(args.instances, resource.executors) + 1), daemon_args)
	# 5% container failure every monkey_interval seconds
	monkey_percentage = 5 # 5%
	vars = {
		"home" : home,
		"version" : version,
		"remote_dir" : remote_dir,
Example #22
def main(args):
    version = os.getenv("HIVE_VERSION")
    if not version:
        version = strftime("%d%b%Y", gmtime())
    home = os.getenv("HIVE_HOME")
    output = "llap-slider-%(version)s" % ({"version": version})
    parser = argparse.ArgumentParser()
    parser.add_argument("--instances", type=int, default=1)
    parser.add_argument("--output", default=output)
    parser.add_argument("--input", required=True)
    parser.add_argument("--args", default="")
    parser.add_argument("--name", default="llap0")
    parser.add_argument("--loglevel", default="INFO")
    parser.add_argument("--chaosmonkey", type=int, default=0)
    parser.add_argument("--slider-keytab-dir", default="")
    parser.add_argument("--slider-keytab", default="")
    parser.add_argument("--slider-principal", default="")
    parser.add_argument("--slider-default-keytab",
                        dest='slider_default_keytab',
                        action='store_true')
    parser.set_defaults(slider_default_keytab=False)
    # Unneeded here for now: parser.add_argument("--hiveconf", action='append')
    #parser.add_argument("--size") parser.add_argument("--xmx") parser.add_argument("--cache") parser.add_argument("--executors")
    (args, unknown_args) = parser.parse_known_args(args)
    input = args.input
    output = args.output
    slider_keytab_dir = args.slider_keytab_dir
    slider_keytab = args.slider_keytab
    slider_principal = args.slider_principal
    # set the defaults only if the defaults are enabled
    if args.slider_default_keytab:
        if not slider_keytab_dir:
            slider_keytab_dir = ".slider/keytabs/llap"
        if not slider_keytab:
            slider_keytab = "llap.keytab"
        if not slider_principal:
            slider_principal = "*****@*****.**"
    if not input:
        print "Cannot find input files"
        sys.exit(1)
        return
    config = json_parse(open(join(input, "config.json")).read())
    java_home = config["java.home"]
    resource = LlapResource(config)
    # 5% container failure every monkey_interval seconds
    monkey_percentage = 5  # 5%
    vars = {
        "home": home,
        "version": version,
        "instances": args.instances,
        "heap": resource.heap_size,
        "container.mb": resource.container_size,
        "container.cores": resource.container_cores,
        "hadoop_home": os.getenv("HADOOP_HOME"),
        "java_home": java_home,
        "name": args.name,
        "daemon_args": args.args,
        "daemon_loglevel": args.loglevel,
        "monkey_interval": args.chaosmonkey,
        "monkey_percentage": monkey_percentage,
        "monkey_enabled": args.chaosmonkey > 0,
        "slider_keytab_dir": slider_keytab_dir,
        "slider_keytab": slider_keytab,
        "slider_principal": slider_principal
    }

    if not exists(output):
        os.makedirs(output)

    src = join(home, "scripts", "llap", "bin")
    dst = join(input, "bin")
    if exists(dst):
        shutil.rmtree(dst)
    shutil.copytree(src, dst)

    # Make the zip package
    tmp = join(output, "tmp")
    pkg = join(tmp, "package")

    src = join(home, "scripts", "llap", "slider")
    dst = join(pkg, "scripts")
    if exists(dst):
        shutil.rmtree(dst)
    shutil.copytree(src, dst)

    with open(join(tmp, "metainfo.xml"), "w") as f:
        f.write(metainfo % vars)

    os.mkdir(join(pkg, "files"))
    tarball = tarfile.open(join(pkg, "files", "llap-%s.tar.gz" % version),
                           "w:gz")
    # recursive add + -C chdir inside
    tarball.add(input, "")
    tarball.close()

    zipped = zipfile.ZipFile(join(output, "llap-%s.zip" % version), "w")
    zipdir(tmp, zipped)
    zipped.close()

    # cleanup after making zip pkg
    shutil.rmtree(tmp)

    with open(join(output, "appConfig.json"), "w") as f:
        f.write(appConfig % vars)

    with open(join(output, "resources.json"), "w") as f:
        f.write(resources % vars)

    with open(join(output, "run.sh"), "w") as f:
        f.write(runner % vars)
    os.chmod(join(output, "run.sh"), 0700)

    print "Prepared %s/run.sh for running LLAP on Slider" % (output)
Example #23
def main(args):
    opts, args = getopt(args, "", [
        "instances=", "output=", "input=", "args=", "name=", "loglevel=",
        "chaosmonkey=", "size=", "xmx=", "cache=", "executors=", "hiveconf="
    ])
    version = os.getenv("HIVE_VERSION")
    if not version:
        version = strftime("%d%b%Y", gmtime())
    home = os.getenv("HIVE_HOME")
    output = "llap-slider-%(version)s" % ({"version": version})
    instances = 1
    name = "llap0"
    d_args = ""
    d_loglevel = "INFO"
    input = None
    monkey = "0"
    for k, v in opts:
        if k in ("--input"):
            input = v
        elif k in ("--output"):
            output = v
        elif k in ("--instances"):
            instances = int(v)
        elif k in ("--name"):
            name = v
        elif k in ("--args"):
            d_args = v
        elif k in ("--loglevel"):
            d_loglevel = v
        elif k in ("--chaosmonkey"):
            monkey = v
    if not input:
        print "Cannot find input files"
        sys.exit(1)
        return
    config = json_parse(open(join(input, "config.json")).read())
    resource = LlapResource(config)
    monkey_interval = int(monkey)
    # 5% container failure every monkey_interval seconds
    monkey_percentage = 5  # 5%
    vars = {
        "home": home,
        "version": version,
        "instances": instances,
        "heap": resource.heap_size,
        "container.mb": resource.container_size,
        "container.cores": resource.container_cores,
        "hadoop_home": os.getenv("HADOOP_HOME"),
        "java_home": os.getenv("JAVA_HOME"),
        "name": name,
        "daemon_args": d_args,
        "daemon_loglevel": d_loglevel,
        "monkey_interval": monkey_interval,
        "monkey_percentage": monkey_percentage,
        "monkey_enabled": monkey_interval > 0
    }

    if not exists(output):
        os.makedirs(output)

    src = join(home, "scripts", "llap", "bin")
    dst = join(input, "bin")
    if exists(dst):
        shutil.rmtree(dst)
    shutil.copytree(src, dst)

    # Make the zip package
    tmp = join(output, "tmp")
    pkg = join(tmp, "package")

    src = join(home, "scripts", "llap", "slider")
    dst = join(pkg, "scripts")
    if exists(dst):
        shutil.rmtree(dst)
    shutil.copytree(src, dst)

    with open(join(tmp, "metainfo.xml"), "w") as f:
        f.write(metainfo % vars)

    os.mkdir(join(pkg, "files"))
    tarball = tarfile.open(join(pkg, "files", "llap-%s.tar.gz" % version),
                           "w:gz")
    # recursive add + -C chdir inside
    tarball.add(input, "")
    tarball.close()

    zipped = zipfile.ZipFile(join(output, "llap-%s.zip" % version), "w")
    zipdir(tmp, zipped)
    zipped.close()

    # cleanup after making zip pkg
    shutil.rmtree(tmp)

    with open(join(output, "appConfig.json"), "w") as f:
        f.write(appConfig % vars)

    with open(join(output, "resources.json"), "w") as f:
        f.write(resources % vars)

    with open(join(output, "run.sh"), "w") as f:
        f.write(runner % vars)
    os.chmod(join(output, "run.sh"), 0700)

    print "Prepared %s/run.sh for running LLAP on Slider" % (output)
Example #24
def main(args):
    opts, args = getopt(
        args,
        "",
        [
            "instances=",
            "output=",
            "input=",
            "args=",
            "name=",
            "loglevel=",
            "chaosmonkey=",
            "size=",
            "xmx=",
            "cache=",
            "executors=",
            "hiveconf=",
        ],
    )
    version = os.getenv("HIVE_VERSION")
    if not version:
        version = strftime("%d%b%Y", gmtime())
    home = os.getenv("HIVE_HOME")
    output = "llap-slider-%(version)s" % ({"version": version})
    instances = 1
    name = "llap0"
    d_args = ""
    d_loglevel = "INFO"
    input = None
    monkey = "0"
    for k, v in opts:
        if k in ("--input"):
            input = v
        elif k in ("--output"):
            output = v
        elif k in ("--instances"):
            instances = int(v)
        elif k in ("--name"):
            name = v
        elif k in ("--args"):
            d_args = v
        elif k in ("--loglevel"):
            d_loglevel = v
        elif k in ("--chaosmonkey"):
            monkey = v
    if not input:
        print "Cannot find input files"
        sys.exit(1)
        return
    config = json_parse(open(join(input, "config.json")).read())
    resource = LlapResource(config)
    monkey_interval = int(monkey)
    # 5% container failure every monkey_interval seconds
    monkey_percentage = 5  # 5%
    vars = {
        "home": home,
        "version": version,
        "instances": instances,
        "heap": resource.heap_size,
        "container.mb": resource.container_size,
        "container.cores": resource.container_cores,
        "hadoop_home": os.getenv("HADOOP_HOME"),
        "java_home": os.getenv("JAVA_HOME"),
        "name": name,
        "daemon_args": d_args,
        "daemon_loglevel": d_loglevel,
        "monkey_interval": monkey_interval,
        "monkey_percentage": monkey_percentage,
        "monkey_enabled": monkey_interval > 0,
    }

    if not exists(output):
        os.makedirs(output)

    src = join(home, "scripts", "llap", "bin")
    dst = join(input, "bin")
    if exists(dst):
        shutil.rmtree(dst)
    shutil.copytree(src, dst)

    # Make the zip package
    tmp = join(output, "tmp")
    pkg = join(tmp, "package")

    src = join(home, "scripts", "llap", "slider")
    dst = join(pkg, "scripts")
    if exists(dst):
        shutil.rmtree(dst)
    shutil.copytree(src, dst)

    with open(join(tmp, "metainfo.xml"), "w") as f:
        f.write(metainfo % vars)

    os.mkdir(join(pkg, "files"))
    tarball = tarfile.open(join(pkg, "files", "llap-%s.tar.gz" % version), "w:gz")
    # recursive add + -C chdir inside
    tarball.add(input, "")
    tarball.close()

    zipped = zipfile.ZipFile(join(output, "llap-%s.zip" % version), "w")
    zipdir(tmp, zipped)
    zipped.close()

    # cleanup after making zip pkg
    shutil.rmtree(tmp)

    with open(join(output, "appConfig.json"), "w") as f:
        f.write(appConfig % vars)

    with open(join(output, "resources.json"), "w") as f:
        f.write(resources % vars)

    with open(join(output, "run.sh"), "w") as f:
        f.write(runner % vars)
    os.chmod(join(output, "run.sh"), 0700)

    print "Prepared %s/run.sh for running LLAP on Slider" % (output)
Example #25
backups = sorted([basename(path) for path in glob(f"{data_('b*')}")],
                 key=sort_backup,
                 reverse=True)
stg = Settings()
settings_keys = [key for key in dir(stg) if not key.startswith('__')]


def save_settings():
    with res('settings.json', 'w+') as file:
        file.write(json_str(dtc_asdict(stg), indent='\t', sort_keys=True))
    print('Settings saved successfully.')


try:
    with res('settings.json') as file:
        try:
            settings_dict = json_parse(file)
            for key in settings_dict:
                if hasattr(stg, key):
                    setattr(stg, key, settings_dict[key])
                else:
                    print(
                        f"'{key}' is not a settings property. It was ignored.")
            del settings_dict
        except Exception:
            pass
except OSError:
    print('Settings not found. Creating default configuration.')
    save_settings()
Example #26
def main(args):
	version = os.getenv("HIVE_VERSION")
	if not version:
		version = strftime("%d%b%Y", gmtime()) 
	home = os.getenv("HIVE_HOME")
	output = "llap-slider-%(version)s" % ({"version": version})
	parser = argparse.ArgumentParser()
	parser.add_argument("--instances", type=int, default=1)
	parser.add_argument("--output", default=output)
	parser.add_argument("--input", required=True)
	parser.add_argument("--args", default="")
	parser.add_argument("--name", default="llap0")
	parser.add_argument("--loglevel", default="INFO")
	parser.add_argument("--logger", default="RFA")
	parser.add_argument("--chaosmonkey", type=int, default=0)
	parser.add_argument("--slider-am-container-mb", type=int, default=1024)
	parser.add_argument("--slider-keytab-dir", default="")
	parser.add_argument("--slider-keytab", default="")
	parser.add_argument("--slider-principal", default="")
	parser.add_argument("--slider-default-keytab", dest='slider_default_keytab', action='store_true')
	parser.set_defaults(slider_default_keytab=False)
	parser.add_argument("--slider-placement", type=int, default=4)
	# Unneeded here for now: parser.add_argument("--hiveconf", action='append')
	#parser.add_argument("--size") parser.add_argument("--xmx") parser.add_argument("--cache") parser.add_argument("--executors")
	(args, unknown_args) = parser.parse_known_args(args)
	input = args.input
	output = args.output
	slider_am_jvm_heapsize = max(args.slider_am_container_mb * 0.8, args.slider_am_container_mb - 1024)
	slider_keytab_dir = args.slider_keytab_dir
	slider_keytab = args.slider_keytab
	slider_principal = args.slider_principal
	# set the defaults only if the defaults are enabled
	if args.slider_default_keytab:
		if not slider_keytab_dir:
			slider_keytab_dir = ".slider/keytabs/llap"
		if not slider_keytab:
			slider_keytab = "llap.keytab"
		if not slider_principal:
			slider_principal = "*****@*****.**"
	if not input:
		print "Cannot find input files"
		sys.exit(1)
		return
	config = json_parse(open(join(input, "config.json")).read())
	java_home = config["java.home"]
	max_direct_memory = config["max_direct_memory"]
	daemon_args = args.args
	if long(max_direct_memory) > 0:
		daemon_args = " -XX:MaxDirectMemorySize=%s %s" % (max_direct_memory, daemon_args)
	resource = LlapResource(config)
	# 5% container failure every monkey_interval seconds
	monkey_percentage = 5 # 5%
	vars = {
		"home" : home,
		"version" : version,
		"instances" : args.instances,
		"heap" : resource.heap_size,
		"container.mb" : resource.container_size,
		"container.cores" : resource.container_cores,
		"hadoop_home" : os.getenv("HADOOP_HOME"),
		"java_home" : java_home,
		"name" : resource.clusterName,
		"daemon_args" : daemon_args,
		"daemon_loglevel" : args.loglevel,
		"daemon_logger" : args.logger,
		"queue.string" : resource.queueString,
		"monkey_interval" : args.chaosmonkey,
		"monkey_percentage" : monkey_percentage,
		"monkey_enabled" : args.chaosmonkey > 0,
		"slider.am.container.mb" : args.slider_am_container_mb,
		"slider_am_jvm_heapsize" : slider_am_jvm_heapsize,
		"slider_keytab_dir" : slider_keytab_dir,
		"slider_keytab" : slider_keytab,
		"slider_principal" : slider_principal,
		"placement" : args.slider_placement
	}
	
	if not exists(output):
		os.makedirs(output)
	
	src = join(home, "scripts", "llap", "bin")
	dst = join(input, "bin")
	if exists(dst):
		shutil.rmtree(dst)
	shutil.copytree(src, dst)


	# Make the zip package
	tmp = join(output, "tmp")
	pkg = join(tmp, "package")

	src = join(home, "scripts", "llap", "slider")
	dst = join(pkg, "scripts")
	if exists(dst):
		shutil.rmtree(dst)
	shutil.copytree(src, dst)

	with open(join(tmp, "metainfo.xml"),"w") as f:
		f.write(metainfo % vars)

	os.mkdir(join(pkg, "files"))
	tarball = tarfile.open(join(pkg, "files", "llap-%s.tar.gz" %  version), "w:gz")
	# recursive add + -C chdir inside
	tarball.add(input, "")
	tarball.close()

	zipped = zipfile.ZipFile(join(output, "llap-%s.zip" % version), "w")
	zipdir(tmp, zipped)
	zipped.close()

	# cleanup after making zip pkg
	shutil.rmtree(tmp)

	with open(join(output, "appConfig.json"), "w") as f:
		f.write(appConfig % vars)
	
	with open(join(output, "resources.json"), "w") as f:
		f.write(resources % vars)

	with open(join(output, "run.sh"), "w") as f:
		f.write(runner % vars)
	os.chmod(join(output, "run.sh"), 0700)

	print "Prepared %s/run.sh for running LLAP on Slider" % (output)
Example #27
def main(args):
	version = os.getenv("HIVE_VERSION")
	if not version:
		version = strftime("%d%b%Y", gmtime()) 
	home = os.getenv("HIVE_HOME")
	output = "llap-yarn-%(version)s" % ({"version": version})
	parser = argparse.ArgumentParser()
	parser.add_argument("--instances", type=int, default=1)
	parser.add_argument("--output", default=output)
	parser.add_argument("--input", required=True)
	parser.add_argument("--args", default="")
	parser.add_argument("--name", default="llap0")
	parser.add_argument("--user", default="hive")
	parser.add_argument("--loglevel", default="INFO")
	parser.add_argument("--logger", default="query-routing")
	parser.add_argument("--service-am-container-mb", type=int, default=1024)
	parser.add_argument("--service-appconfig-global", nargs='*', type=service_appconfig_global_property, action='append')
	parser.add_argument("--service-keytab-dir", default="")
	parser.add_argument("--service-keytab", default="")
	parser.add_argument("--service-principal", default="")
	parser.add_argument("--service-default-keytab", dest='service_default_keytab', action='store_true')
	parser.add_argument("--service-placement", type=int, default=4)
	parser.add_argument("--health-percent", type=int, default=80)
	parser.add_argument("--health-time-window-secs", type=int, default=300)
	parser.add_argument("--health-init-delay-secs", type=int, default=400)
	parser.set_defaults(service_default_keytab=False)
	parser.add_argument("--startImmediately", dest='start_immediately', action='store_true')
	parser.add_argument("--javaChild", dest='java_child', action='store_true')
	parser.set_defaults(start_immediately=False)
	parser.set_defaults(java_child=False)
	# Unneeded here for now: parser.add_argument("--hiveconf", action='append')
	#parser.add_argument("--size") parser.add_argument("--xmx") parser.add_argument("--cache") parser.add_argument("--executors")
	(args, unknown_args) = parser.parse_known_args(args)
	if args.start_immediately and not args.java_child:
		sys.exit(0)
		return
	if args.java_child:
		print "%s Running as a child of LlapServiceDriver" % (strftime("%H:%M:%S", gmtime()))
	else:
		print "%s Running after LlapServiceDriver" % (strftime("%H:%M:%S", gmtime()))

	input = args.input
	output = args.output
	user = args.user
	service_am_jvm_heapsize = max(args.service_am_container_mb * 0.8, args.service_am_container_mb - 1024)
	service_keytab_dir = args.service_keytab_dir
	service_keytab = args.service_keytab
	service_principal = args.service_principal
	# set the defaults only if the defaults are enabled
	if args.service_default_keytab:
		if not service_keytab_dir:
			service_keytab_dir = ".yarn/keytabs/llap"
		if not service_keytab:
			service_keytab = "llap.keytab"
		if not service_principal:
			service_principal = "*****@*****.**"
	service_keytab_path = service_keytab_dir
	if service_keytab_path:
		if service_keytab:
			service_keytab_path += "/" + service_keytab
	else:
		service_keytab_path = service_keytab

	if not input:
		print "Cannot find input files"
		sys.exit(1)
		return
	config = json_parse(open(join(input, "config.json")).read())
	java_home = config["java.home"]
	max_direct_memory = config["max_direct_memory"]

	resource = LlapResource(config)

	daemon_args = args.args
	if long(max_direct_memory) > 0:
		daemon_args = " -XX:MaxDirectMemorySize=%s %s" % (max_direct_memory, daemon_args)
	daemon_args = " -Dhttp.maxConnections=%s %s" % ((max(args.instances, resource.executors) + 1), daemon_args)
	vars = {
		"home" : home,
		"version" : version,
		"instances" : args.instances,
		"heap" : resource.heap_size,
		"container.mb" : resource.container_size,
		"container.cores" : resource.container_cores,
		"hadoop_home" : os.getenv("HADOOP_HOME"),
		"java_home" : java_home,
		"name" : resource.clusterName,
		"daemon_args" : daemon_args,
		"daemon_loglevel" : args.loglevel,
		"daemon_logger" : args.logger,
		"queue.string" : resource.queueString,
		"service.am.container.mb" : args.service_am_container_mb,
		"service_appconfig_global_append": construct_service_site_global_string(args.service_appconfig_global),
		"service_am_jvm_heapsize" : service_am_jvm_heapsize,
		"service_keytab_path" : service_keytab_path,
		"service_principal" : service_principal,
		"placement" : args.service_placement,
		"health_percent": args.health_percent,
		"health_time_window": args.health_time_window_secs,
		"health_init_delay": args.health_init_delay_secs
	}
	
	if not exists(output):
		os.makedirs(output)
	
	src = join(home, "scripts", "llap", "bin")
	dst = join(input, "bin")
	if exists(dst):
		shutil.rmtree(dst)
	shutil.copytree(src, dst)

	# Make the llap tarball
	print "%s Prepared the files" % (strftime("%H:%M:%S", gmtime()))

	tarball = tarfile.open(join(output, "llap-%s.tar.gz" %  version), "w:gz")
	# recursive add + -C chdir inside
	tarball.add(input, "")
	tarball.close()

	print "%s Packaged the files" % (strftime("%H:%M:%S", gmtime()))

	with open(join(output, "Yarnfile"), "w") as f:
		f.write(yarnfile % vars)

	with open(join(output, "run.sh"), "w") as f:
		f.write(runner % vars)
	os.chmod(join(output, "run.sh"), 0700)

	if not args.java_child:
		print "%s Prepared %s/run.sh for running LLAP on YARN" % (strftime("%H:%M:%S", gmtime()), output)
Example #28
 def make_request(self, url):
     try:
         r = requests.get(url, headers={"Client-id": CLIENT_ID})
     except RequestException:
         cprint_red("Bad internet detected, exiting program")
         raise SystemExit(1)  # stop here; otherwise json_parse(r.text) below would hit an unbound name
     return json_parse(r.text)
Example #29
def main(args):
	version = os.getenv("HIVE_VERSION")
	if not version:
		version = strftime("%d%b%Y", gmtime()) 
	home = os.getenv("HIVE_HOME")
	output = "llap-slider-%(version)s" % ({"version": version})
	parser = argparse.ArgumentParser()
	parser.add_argument("--instances", type=int, default=1)
	parser.add_argument("--output", default=output)
	parser.add_argument("--input", required=True)
	parser.add_argument("--args", default="")
	parser.add_argument("--name", default="llap0")
	parser.add_argument("--loglevel", default="INFO")
	parser.add_argument("--chaosmonkey", type=int, default="0")
	# Unneeded here for now: parser.add_argument("--hiveconf", action='append')
	#parser.add_argument("--size") parser.add_argument("--xmx") parser.add_argument("--cache") parser.add_argument("--executors")
	(args, unknown_args) = parser.parse_known_args(args)
	input = args.input
	output = args.output
	if not input:
		print "Cannot find input files"
		sys.exit(1)
		return
	config = json_parse(open(join(input, "config.json")).read())
	resource = LlapResource(config)
	# 5% container failure every monkey_interval seconds
	monkey_percentage = 5 # 5%
	vars = {
		"home" : home,
		"version" : version,
		"instances" : args.instances,
		"heap" : resource.heap_size,
		"container.mb" : resource.container_size,
		"container.cores" : resource.container_cores,
		"hadoop_home" : os.getenv("HADOOP_HOME"),
		"java_home" : os.getenv("JAVA_HOME"),
		"name" : args.name,
		"daemon_args" : args.args,
		"daemon_loglevel" : args.loglevel,
		"monkey_interval" : args.chaosmonkey,
		"monkey_percentage" : monkey_percentage,
		"monkey_enabled" : args.chaosmonkey > 0
	}
	
	if not exists(output):
		os.makedirs(output)
	
	src = join(home, "scripts", "llap", "bin")
	dst = join(input, "bin")
	if exists(dst):
		shutil.rmtree(dst)
	shutil.copytree(src, dst)


	# Make the zip package
	tmp = join(output, "tmp")
	pkg = join(tmp, "package")

	src = join(home, "scripts", "llap", "slider")
	dst = join(pkg, "scripts")
	if exists(dst):
		shutil.rmtree(dst)
	shutil.copytree(src, dst)

	with open(join(tmp, "metainfo.xml"),"w") as f:
		f.write(metainfo % vars)

	os.mkdir(join(pkg, "files"))
	tarball = tarfile.open(join(pkg, "files", "llap-%s.tar.gz" %  version), "w:gz")
	# recursive add + -C chdir inside
	tarball.add(input, "")
	tarball.close()

	zipped = zipfile.ZipFile(join(output, "llap-%s.zip" % version), "w")
	zipdir(tmp, zipped)
	zipped.close()

	# cleanup after making zip pkg
	shutil.rmtree(tmp)

	with open(join(output, "appConfig.json"), "w") as f:
		f.write(appConfig % vars)
	
	with open(join(output, "resources.json"), "w") as f:
		f.write(resources % vars)

	with open(join(output, "run.sh"), "w") as f:
		f.write(runner % vars)
	os.chmod(join(output, "run.sh"), 0700)

	print "Prepared %s/run.sh for running LLAP on Slider" % (output)
Example #30
def main(args):
    version = os.getenv("HIVE_VERSION")
    if not version:
        version = strftime("%d%b%Y", gmtime())
    home = os.getenv("HIVE_HOME")
    output = "llap-slider-%(version)s" % ({"version": version})
    parser = argparse.ArgumentParser()
    parser.add_argument("--instances", type=int, default=1)
    parser.add_argument("--output", default=output)
    parser.add_argument("--input", required=True)
    parser.add_argument("--args", default="")
    parser.add_argument("--name", default="llap0")
    parser.add_argument("--loglevel", default="INFO")
    parser.add_argument("--logger", default="query-routing")
    parser.add_argument("--chaosmonkey", type=int, default=0)
    parser.add_argument("--slider-am-container-mb", type=int, default=1024)
    parser.add_argument("--slider-appconfig-global",
                        nargs='*',
                        type=slider_appconfig_global_property,
                        action='append')
    parser.add_argument("--slider-keytab-dir", default="")
    parser.add_argument("--slider-keytab", default="")
    parser.add_argument("--slider-principal", default="")
    parser.add_argument("--slider-default-keytab",
                        dest='slider_default_keytab',
                        action='store_true')
    parser.add_argument("--slider-placement", type=int, default=4)
    parser.set_defaults(slider_default_keytab=False)
    parser.add_argument("--startImmediately",
                        dest='start_immediately',
                        action='store_true')
    parser.add_argument("--javaChild", dest='java_child', action='store_true')
    parser.set_defaults(start_immediately=False)
    parser.set_defaults(java_child=False)
    # Unneeded here for now: parser.add_argument("--hiveconf", action='append')
    #parser.add_argument("--size") parser.add_argument("--xmx") parser.add_argument("--cache") parser.add_argument("--executors")
    (args, unknown_args) = parser.parse_known_args(args)
    if args.start_immediately and not args.java_child:
        sys.exit(0)
        return
    if args.java_child:
        print "%s Running as a child of LlapServiceDriver" % (strftime(
            "%H:%M:%S", gmtime()))
    else:
        print "%s Running after LlapServiceDriver" % (strftime(
            "%H:%M:%S", gmtime()))

    input = args.input
    output = args.output
    slider_am_jvm_heapsize = max(args.slider_am_container_mb * 0.8,
                                 args.slider_am_container_mb - 1024)
    slider_keytab_dir = args.slider_keytab_dir
    slider_keytab = args.slider_keytab
    slider_principal = args.slider_principal
    # set the defaults only if the defaults are enabled
    if args.slider_default_keytab:
        if not slider_keytab_dir:
            slider_keytab_dir = ".slider/keytabs/llap"
        if not slider_keytab:
            slider_keytab = "llap.keytab"
        if not slider_principal:
            slider_principal = "*****@*****.**"
    if not input:
        print "Cannot find input files"
        sys.exit(1)
        return
    config = json_parse(open(join(input, "config.json")).read())
    java_home = config["java.home"]
    max_direct_memory = config["max_direct_memory"]
    daemon_args = args.args
    if long(max_direct_memory) > 0:
        daemon_args = " -XX:MaxDirectMemorySize=%s %s" % (max_direct_memory,
                                                          daemon_args)
    resource = LlapResource(config)
    # 5% container failure every monkey_interval seconds
    monkey_percentage = 5  # 5%
    vars = {
        "home": home,
        "version": version,
        "instances": args.instances,
        "heap": resource.heap_size,
        "container.mb": resource.container_size,
        "container.cores": resource.container_cores,
        "hadoop_home": os.getenv("HADOOP_HOME"),
        "java_home": java_home,
        "name": resource.clusterName,
        "daemon_args": daemon_args,
        "daemon_loglevel": args.loglevel,
        "daemon_logger": args.logger,
        "queue.string": resource.queueString,
        "monkey_interval": args.chaosmonkey,
        "monkey_percentage": monkey_percentage,
        "monkey_enabled": args.chaosmonkey > 0,
        "slider.am.container.mb": args.slider_am_container_mb,
        "slider_appconfig_global_append":
            construct_slider_site_global_string(args.slider_appconfig_global),
        "slider_am_jvm_heapsize": slider_am_jvm_heapsize,
        "slider_keytab_dir": slider_keytab_dir,
        "slider_keytab": slider_keytab,
        "slider_principal": slider_principal,
        "placement": args.slider_placement
    }

    if not exists(output):
        os.makedirs(output)

    src = join(home, "scripts", "llap", "bin")
    dst = join(input, "bin")
    if exists(dst):
        shutil.rmtree(dst)
    shutil.copytree(src, dst)

    # Make the zip package
    tmp = join(output, "tmp")
    pkg = join(tmp, "package")

    src = join(home, "scripts", "llap", "slider")
    dst = join(pkg, "scripts")
    if exists(dst):
        shutil.rmtree(dst)
    shutil.copytree(src, dst)

    with open(join(tmp, "metainfo.xml"), "w") as f:
        f.write(metainfo % vars)

    os.mkdir(join(pkg, "files"))
    print "%s Prepared the files" % (strftime("%H:%M:%S", gmtime()))

    tarball = tarfile.open(join(pkg, "files", "llap-%s.tar.gz" % version),
                           "w:gz")
    # recursive add + -C chdir inside
    tarball.add(input, "")
    tarball.close()

    zipped = zipfile.ZipFile(join(output, "llap-%s.zip" % version), "w")
    zipdir(tmp, zipped)
    zipped.close()
    print "%s Packaged the files" % (strftime("%H:%M:%S", gmtime()))

    # cleanup after making zip pkg
    shutil.rmtree(tmp)

    with open(join(output, "appConfig.json"), "w") as f:
        f.write(appConfig % vars)

    with open(join(output, "resources.json"), "w") as f:
        f.write(resources % vars)

    with open(join(output, "run.sh"), "w") as f:
        f.write(runner % vars)
    os.chmod(join(output, "run.sh"), 0700)

    if not args.java_child:
        print "%s Prepared %s/run.sh for running LLAP on Slider" % (strftime(
            "%H:%M:%S", gmtime()), output)
Example #31
def main(args):
	version = os.getenv("HIVE_VERSION")
	if not version:
		version = strftime("%d%b%Y", gmtime()) 
	home = os.getenv("HIVE_HOME")
	output = "llap-yarn-%(version)s" % ({"version": version})
	parser = argparse.ArgumentParser()
	parser.add_argument("--instances", type=int, default=1)
	parser.add_argument("--output", default=output)
	parser.add_argument("--input", required=True)
	parser.add_argument("--args", default="")
	parser.add_argument("--name", default="llap0")
	parser.add_argument("--loglevel", default="INFO")
	parser.add_argument("--logger", default="query-routing")
	parser.add_argument("--service-am-container-mb", type=int, default=1024)
	parser.add_argument("--service-appconfig-global", nargs='*', type=service_appconfig_global_property, action='append')
	parser.add_argument("--service-keytab-dir", default="")
	parser.add_argument("--service-keytab", default="")
	parser.add_argument("--service-principal", default="")
	parser.add_argument("--service-default-keytab", dest='service_default_keytab', action='store_true')
	parser.add_argument("--service-placement", type=int, default=4)
	parser.add_argument("--health-percent", type=int, default=80)
	parser.add_argument("--health-time-window-secs", type=int, default=300)
	parser.add_argument("--health-init-delay-secs", type=int, default=400)
	parser.set_defaults(service_default_keytab=False)
	parser.add_argument("--startImmediately", dest='start_immediately', action='store_true')
	parser.add_argument("--javaChild", dest='java_child', action='store_true')
	parser.set_defaults(start_immediately=False)
	parser.set_defaults(java_child=False)
	# Unneeded here for now: parser.add_argument("--hiveconf", action='append')
	#parser.add_argument("--size") parser.add_argument("--xmx") parser.add_argument("--cache") parser.add_argument("--executors")
	(args, unknown_args) = parser.parse_known_args(args)
	if args.start_immediately and not args.java_child:
		sys.exit(0)
		return
	if args.java_child:
		print "%s Running as a child of LlapServiceDriver" % (strftime("%H:%M:%S", gmtime()))
	else:
		print "%s Running after LlapServiceDriver" % (strftime("%H:%M:%S", gmtime()))

	input = args.input
	output = args.output
	service_am_jvm_heapsize = max(args.service_am_container_mb * 0.8, args.service_am_container_mb - 1024)
	service_keytab_dir = args.service_keytab_dir
	service_keytab = args.service_keytab
	service_principal = args.service_principal
	# set the defaults only if the defaults are enabled
	if args.service_default_keytab:
		if not service_keytab_dir:
			service_keytab_dir = ".yarn/keytabs/llap"
		if not service_keytab:
			service_keytab = "llap.keytab"
		if not service_principal:
			service_principal = "*****@*****.**"
	service_keytab_path = service_keytab_dir
	if service_keytab_path:
		if service_keytab:
			service_keytab_path += "/" + service_keytab
	else:
		service_keytab_path = service_keytab
	if service_keytab_path:
		service_keytab_path = "hdfs:///user/hive/" + service_keytab_path

	if not input:
		print "Cannot find input files"
		sys.exit(1)
		return
	config = json_parse(open(join(input, "config.json")).read())
	java_home = config["java.home"]
	max_direct_memory = config["max_direct_memory"]

	resource = LlapResource(config)

	daemon_args = args.args
	if long(max_direct_memory) > 0:
		daemon_args = " -XX:MaxDirectMemorySize=%s %s" % (max_direct_memory, daemon_args)
	daemon_args = " -Dhttp.maxConnections=%s %s" % ((max(args.instances, resource.executors) + 1), daemon_args)
	vars = {
		"home" : home,
		"version" : version,
		"instances" : args.instances,
		"heap" : resource.heap_size,
		"container.mb" : resource.container_size,
		"container.cores" : resource.container_cores,
		"hadoop_home" : os.getenv("HADOOP_HOME"),
		"java_home" : java_home,
		"name" : resource.clusterName,
		"daemon_args" : daemon_args,
		"daemon_loglevel" : args.loglevel,
		"daemon_logger" : args.logger,
		"queue.string" : resource.queueString,
		"service.am.container.mb" : args.service_am_container_mb,
		"service_appconfig_global_append": construct_service_site_global_string(args.service_appconfig_global),
		"service_am_jvm_heapsize" : service_am_jvm_heapsize,
		"service_keytab_path" : service_keytab_path,
		"service_principal" : service_principal,
		"placement" : args.service_placement,
		"health_percent": args.health_percent,
		"health_time_window": args.health_time_window_secs,
		"health_init_delay": args.health_init_delay_secs
	}
	
	if not exists(output):
		os.makedirs(output)
	
	src = join(home, "scripts", "llap", "bin")
	dst = join(input, "bin")
	if exists(dst):
		shutil.rmtree(dst)
	shutil.copytree(src, dst)

	# Make the llap tarball
	print "%s Prepared the files" % (strftime("%H:%M:%S", gmtime()))

	tarball = tarfile.open(join(output, "llap-%s.tar.gz" %  version), "w:gz")
	# recursive add + -C chdir inside
	tarball.add(input, "")
	tarball.close()

	print "%s Packaged the files" % (strftime("%H:%M:%S", gmtime()))

	with open(join(output, "Yarnfile"), "w") as f:
		f.write(yarnfile % vars)

	with open(join(output, "run.sh"), "w") as f:
		f.write(runner % vars)
	os.chmod(join(output, "run.sh"), 0700)

	if not args.java_child:
		print "%s Prepared %s/run.sh for running LLAP on YARN" % (strftime("%H:%M:%S", gmtime()), output)