def rollback(ctx, appname, cluster, revision):
    """Roll an app back to the given revision on the given cluster."""
    app = get_appname(appname=appname)
    api = ctx.obj['kae_api']
    api.set_cluster(cluster)
    with handle_console_err():
        api.rollback(app, revision)
    click.echo(info('Rollback %s(revision %s) done.' % (app, revision)))
def set_secret(ctx, appname, cluster, f, literal, replace):
    """Set an app's secret from a JSON literal or a JSON file.

    Keys and values of the secret must all be strings; when both a literal
    and a file are given, the file's content wins (it is loaded last).
    """
    payload = None
    if literal:
        try:
            payload = json.loads(literal)
        except Exception:
            fatal(
                "can't load secret from literal, pls ensure it's a valid json")
    try:
        if f:
            with open(f, "r") as fp:
                payload = json.load(fp)
    except Exception:
        fatal("can't read secret data from {}".format(f))
    if payload is None:
        fatal("you must specify literal or filename")
    for key, val in payload.items():
        if not isinstance(val, str):
            fatal("value of secret must be string")
        if not isinstance(key, str):
            fatal("key of secret must be string")
    api = ctx.obj['kae_api']
    api.set_cluster(cluster)
    appname = get_appname(appname=appname)
    with handle_console_err():
        result = api.set_secret(appname, payload, replace)
    click.echo(info(str(result)))
def register_release(ctx, appname, tag, git, f, literal, force):
    """Register a release (appname + git tag + specs) with the console.

    The specs text comes from --literal / -f, or from an app.yaml in the
    current working directory; registering without specs is a fatal error.
    The git url falls back to the remote of the current repository.
    """
    appname = get_appname(appname=appname)
    tag = get_git_tag(git_tag=tag)
    specs_text = get_specs_text(f, literal)
    if specs_text is None:
        errmsg = [
            "specs_text is required, please use one of the instructions to specify it.",
            # fixed typo: was "coomand line"
            "1. specify --literal or -f in command line",
            "2. make the current workdir in the source code dir which contains app.yaml"
        ]
        fatal('\n'.join(errmsg))
    kae = ctx.obj['kae_api']
    git = git or get_remote_url(remote=ctx.obj['remotename'])
    if not git:
        fatal("git url not found, please check repository or pass argument")
    branch = get_current_branch()
    with handle_console_err():
        kae.register_release(appname, tag, git, specs_text,
                             branch=branch, force=force)
    click.echo(info('Register %s %s %s done.' % (appname, tag, git)))
def kill_build_task(ctx, appname):
    """Abort the running build task of an app."""
    api = ctx.obj['kae_api']
    app = get_appname(appname=appname)
    with handle_console_err():
        api.kill_build_task(app)
    click.echo(info('Kill app %s\'s build task done.' % (app, )))
def get_config(ctx, appname, cluster):
    """Show both the newest and the currently-deployed config of an app."""
    def _dump(cfg):
        # dict configs are printed key/data pair by pair; anything else verbatim
        if isinstance(cfg, dict):
            for key, val in cfg.items():
                click.echo(info("key:"))
                click.echo(key)
                click.echo(info("data:"))
                click.echo(val)
        else:
            click.echo(cfg)

    api = ctx.obj['kae_api']
    api.set_cluster(cluster)
    appname = get_appname(appname=appname)
    with handle_console_err():
        d = api.get_config(appname)
    newest = d.get("newest")
    current = d.get("current")
    click.echo(info("------------------ newest ----------------------"))
    _dump(newest)
    click.echo("\n\n")
    click.echo(info("----------------- current --------------------"))
    _dump(current)
def renew(ctx, appname, cluster):
    """Renew an app on the given cluster."""
    app = get_appname(appname=appname)
    api = ctx.obj['kae_api']
    api.set_cluster(cluster)
    with handle_console_err():
        api.renew(app)
    click.echo(info('Renew %s done.' % (app, )))
def delete_app(ctx, appname):
    """Delete an app and echo the server's JSON response."""
    api = ctx.obj['kae_api']
    app = get_appname(appname=appname)
    with handle_console_err():
        result = api.delete_app(app)
    click.echo(info(json.dumps(result)))
def list_sparkapp(ctx, raw):
    """List spark applications, raw (pprint) or as a summary table."""
    kae = ctx.obj['kae_api']
    with handle_console_err():
        sparkapps = kae.list_sparkapp()
    if raw:
        # NOTE(review): `pprint` is called as a function here, while other
        # commands use `pprint.pformat` — verify the module's import style.
        pprint(sparkapps)
        return
    table = PrettyTable([
        'name', 'type', 'd-cores', 'd-memory', 'e-cores', 'e-memory',
        'e-number', 'status', 'user',
        'created',  # fixed header typo: was 'craeted'
        'schedule', 'concurrency'
    ])
    for r in sparkapps:
        # specs_text is stored as YAML; parse it for the resource columns
        specs = yaml.safe_load(r['specs_text'])
        table.add_row([
            r['name'], specs['apptype'],
            specs['driver']['cpu'], specs['driver']['memory'],
            specs['executor']['cpu'], specs['executor']['memory'],
            specs['executor']['instances'],
            r['status'], r['nickname'], r['created'],
            specs.get('schedule', None),
            specs.get('concurrencyPolicy', None)
        ])
    click.echo(table)
def get_secret(ctx, appname):
    """Fetch and pretty-print an app's secret."""
    api = ctx.obj['kae_api']
    app = get_appname(appname=appname)
    with handle_console_err():
        secret = api.get_secret(app)
    click.echo(info(pprint.pformat(secret)))
def get_release_specs(ctx, appname, tag):
    """Print the raw specs_text of a single release."""
    api = ctx.obj['kae_api']
    app = get_appname(appname=appname)
    git_tag = get_git_tag(git_tag=tag)
    with handle_console_err():
        release = api.get_release(app, git_tag)
    click.echo(info(release['specs_text']))
def delete_app_canary(ctx, appname, cluster):
    """Delete the canary version of an app on a cluster."""
    api = ctx.obj['kae_api']
    app = get_appname(appname=appname)
    api.set_cluster(cluster)
    with handle_console_err():
        result = api.delete_app_canary(app)
    click.echo(info(json.dumps(result)))
def set_app_abtesting_rules(ctx, appname, rules, cluster):
    """Set AB-testing rules (a JSON string) for an app on a cluster.

    Invalid JSON in `rules` is reported through fatal() instead of
    crashing with an uncaught traceback.
    """
    kae = ctx.obj['kae_api']
    appname = get_appname(appname=appname)
    try:
        # json.JSONDecodeError subclasses ValueError
        rules = json.loads(rules)
    except ValueError:
        fatal("can't load rules, pls ensure it's a valid json")
    kae.set_cluster(cluster)
    with handle_console_err():
        ret = kae.set_app_abtesting_rules(appname, rules)
    click.echo(info(json.dumps(ret)))
def get_job_log(ctx, jobname, follow):
    """Print a job's log, streaming it unless follow is exactly False."""
    api = ctx.obj['kae_api']
    with handle_console_err():
        # deliberate identity test: anything other than the literal False
        # (including None) takes the streaming branch, as in the original
        if follow is False:
            click.echo(api.get_job_log(jobname))
        else:
            for line in api.get_job_log(jobname, follow):
                click.echo(line)
            click.echo(info('log end..'))
def get_app(ctx, appname, raw):
    """Display basic information about a single app."""
    api = ctx.obj['kae_api']
    app_name = get_appname(appname=appname)
    with handle_console_err():
        app = api.get_app(app_name)
    if raw:
        click.echo(str(app))
        return
    table = PrettyTable(['name', 'type', 'git', 'created'])
    table.align['git'] = 'l'
    table.add_row([app['name'], app['type'], app['git'], app['created']])
    click.echo(table)
def get_config(ctx, appname):
    """Print an app's config: key/data pairs when a dict, verbatim otherwise."""
    api = ctx.obj['kae_api']
    app = get_appname(appname=appname)
    with handle_console_err():
        cfg = api.get_config(app)
    if not isinstance(cfg, dict):
        click.echo(cfg)
        return
    for key, val in cfg.items():
        click.echo(info("key:"))
        click.echo(key)
        click.echo(info("data:"))
        click.echo(val)
def list_job(ctx, raw):
    """List all jobs, raw or as a name/status/user/created table."""
    api = ctx.obj['kae_api']
    with handle_console_err():
        jobs = api.list_job()
    if raw:
        click.echo(str(jobs))
        return
    table = PrettyTable(['name', 'status', 'user', 'created'])
    for job in jobs:
        table.add_row(
            [job['name'], job['status'], job['nickname'], job['created']])
    click.echo(table)
def create_job(ctx, jobname, git, f, literal):
    """Create a job from specs given via --literal / -f or a local app.yaml.

    Missing specs_text is now a fatal error with an explanatory message,
    instead of silently passing None through to the console API.
    """
    specs_text = get_specs_text(f, literal)
    if specs_text is None:
        errmsg = [
            "specs_text is required, please use one of the instructions to specify it.",
            "1. specify --literal or -f in command line",
            "2. make the current workdir in the source code dir which contains app.yaml"
        ]
        fatal('\n'.join(errmsg))
    kae = ctx.obj['kae_api']
    # fall back to the git remote of the current repository
    git = git or get_remote_url(remote=ctx.obj['remotename'])
    if not git:
        fatal("git url not found, please check repository or pass argument")
    branch = get_current_branch()
    with handle_console_err():
        kae.create_job(jobname=jobname, git=git, specs_text=specs_text,
                       branch=branch)
    click.echo(info('Create job done.'))
def register_release(ctx, appname, tag, git, f, literal):
    """Register a release (appname + git tag + specs) with the console.

    Missing specs_text is now a fatal error with an explanatory message,
    instead of silently passing None through to the console API.
    """
    appname = get_appname(appname=appname)
    tag = get_git_tag(git_tag=tag)
    specs_text = get_specs_text(f, literal)
    if specs_text is None:
        errmsg = [
            "specs_text is required, please use one of the instructions to specify it.",
            "1. specify --literal or -f in command line",
            "2. make the current workdir in the source code dir which contains app.yaml"
        ]
        fatal('\n'.join(errmsg))
    kae = ctx.obj['kae_api']
    # fall back to the git remote of the current repository
    git = git or get_remote_url(remote=ctx.obj['remotename'])
    if not git:
        fatal("git url not found, please check repository or pass argument")
    branch = get_current_branch()
    with handle_console_err():
        kae.register_release(appname, tag, git, specs_text, branch=branch)
    click.echo(info('Register %s %s %s done.' % (appname, tag, git)))
def get_app_releases(ctx, appname, raw):
    """List an app's releases, raw or as a name/tag/created table."""
    api = ctx.obj['kae_api']
    app = get_appname(appname=appname)
    with handle_console_err():
        releases = api.get_app_releases(app)
    if raw:
        click.echo(str(releases))
        return
    table = PrettyTable(['name', 'tag', 'created'])
    for rel in releases:
        table.add_row([app, rel['tag'], rel['created']])
    click.echo(table)
def get_release(ctx, appname, tag, raw):
    """Show one release, raw or as a name/tag/created table."""
    api = ctx.obj['kae_api']
    app = get_appname(appname=appname)
    git_tag = get_git_tag(git_tag=tag)
    with handle_console_err():
        rel = api.get_release(app, git_tag)
    if raw:
        click.echo(str(rel))
        return
    table = PrettyTable(['name', 'tag', 'created'])
    table.add_row([app, rel['tag'], rel['created']])
    click.echo(table)
def set_config(ctx, appname, name, f, literal):
    """Set an app's named config from a literal string or a file.

    Bug fix: the original left `data` unbound (NameError) when neither
    --literal nor -f was given; now it fails with an explicit fatal(),
    mirroring set_secret.
    """
    kae = ctx.obj['kae_api']
    appname = get_appname(appname=appname)
    data = None
    if literal:
        data = literal
    elif f:
        try:
            with open(f, "r") as fp:
                data = fp.read()
        except Exception:
            fatal("can't read config data from {}".format(f))
    if data is None:
        fatal("you must specify literal or filename")
    with handle_console_err():
        d = kae.set_config(appname, name, data)
    ss = pprint.pformat(d)
    click.echo(info(ss))
def upload(ctx, appname, files, type):
    """Upload local files of the given type for an app.

    Fix: the original opened each file with open(f, 'rb') and never closed
    the handles; they are now closed in a finally block, even when fatal()
    or the upload itself aborts mid-way.
    (`type` shadows the builtin, but renaming it would break click's
    option-to-parameter mapping.)
    """
    kae = ctx.obj['kae_api']
    upload_files = []
    try:
        for path in files:
            if not os.path.exists(path):
                fatal('File {} not exist'.format(path))
            upload_files.append(('file', (path, open(path, 'rb'))))
        with handle_console_err():
            res = kae.upload(appname, type, upload_files)
    finally:
        # close whatever was opened so far
        for _, (_, fp) in upload_files:
            fp.close()
    if res['error']:
        fatal(res['error'])
    click.echo(info('upload successful'))
def get_app_pods(ctx, appname, raw):
    """Show the pods of an app, raw or as a name/status/ready table."""
    api = ctx.obj['kae_api']
    app = get_appname(appname=appname)
    with handle_console_err():
        pods = api.get_app_pods(app)
    if raw:
        click.echo(str(pods))
        return
    table = PrettyTable(['name', 'status', 'ready'])
    for item in pods['items']:
        status = item['status']
        # count containers currently reporting ready
        ready_count = sum(
            1 for cs in status['container_statuses'] if cs['ready'])
        table.add_row([item['metadata']['name'], status['phase'], ready_count])
    click.echo(table)
def delete_sparkapp(ctx, appname, f):
    """Delete a spark application, named directly or via a yaml spec file."""
    api = ctx.obj['kae_api']
    if f:
        # fatal() terminates, so guard clauses replace the original else-chain
        if not os.path.exists(f):
            fatal('The yaml file not exists!')
        data = read_yaml_file(f)
        if not data:
            fatal('yaml error')
        appname = data['appname']
    with handle_console_err():
        result = api.delete_sparkapp(appname)
    if result['error']:
        click.echo(error(result['error']))
    else:
        click.echo(
            info('Delete spark application {} successfully'.format(appname)))
def delete_job(ctx, jobname):
    """Delete a job and echo the server's response."""
    api = ctx.obj['kae_api']
    with handle_console_err():
        outcome = api.delete_job(jobname)
    click.echo(str(outcome))
def restart_sparkapp(ctx, appname):
    """Restart a spark application and echo the server's response."""
    api = ctx.obj['kae_api']
    with handle_console_err():
        outcome = api.restart_sparkapp(appname)
    click.echo(str(outcome))
def create_sparkapp(ctx, mainfile, arguments, f, appname, apptype, schedule,
                    concurrency_policy, image, pythonversion, conf,
                    sparkversion, mode, jars, files, py_files, packages,
                    repositories, driver_memory, driver_cores,
                    executor_memory, executor_cores, number_executors,
                    selector, comment):
    """Create a spark application from a yaml file or command-line options.

    Local dependency files (mainfile, jars, files, py-files) are uploaded
    to the console first; paths on a remote protocol (s3a://, hdfs://) are
    passed through untouched.

    Fixes: --conf/--selector values may now contain '=' (split once only);
    the "must should spec `shedule`" error message is corrected; dead
    commented-out code removed.
    """
    kae = ctx.obj['kae_api']
    data = {}
    required = ['appname', 'image', 'mainfile']
    if f:
        if not os.path.exists(f):
            fatal('The yaml file not exists!')
        data = read_yaml_file(f)
        if not data:
            fatal('yaml error')
        # fill in resource defaults the yaml may omit
        if not data.get('driver'):
            data['driver'] = {'cpu': 1, 'memory': '512m'}
        if not data.get('executor'):
            data['executor'] = {
                'cpu': 1,
                'memory': '512m',
                'instances': 1
            }
    else:
        data = {
            'appname': appname,
            'apptype': apptype,
            'image': image,
            'pythonVersion': pythonversion,
            'driver': {
                'cpu': driver_cores,
                'memory': driver_memory,
            },
            'executor': {
                'cpu': executor_cores,
                'memory': executor_memory,
                'instances': number_executors
            },
            'mainfile': mainfile,
            'comment': comment or ''
        }
    # --conf k=v pairs; split only on the first '=' so values may contain '='
    sparkConf = {}
    for item in conf:
        k, v = item.split('=', 1)
        sparkConf[k] = v
    if sparkConf:
        data['sparkConf'] = sparkConf
    nodeSelector = {}
    for item in selector:
        k, v = item.split('=', 1)
        nodeSelector[k] = v
    if nodeSelector:
        data['nodeSelector'] = nodeSelector
    if arguments:
        data['arguments'] = list(arguments)
    if data['apptype'] == 'scheduledsparkapplication':
        if not schedule:
            # fixed message: was "must should spec `shedule`"
            fatal('scheduledsparkapplication must specify `schedule`')
        data['schedule'] = schedule
        data['concurrencyPolicy'] = concurrency_policy
    if jars:
        data['jars'] = jars.split(',')
    if files:
        data['files'] = files.split(',')
    if py_files:
        data['py-files'] = py_files.split(',')
    for require_key in required:
        if require_key not in data:
            fatal('Miss required argument {}'.format(require_key))

    data.setdefault('deps', {})
    jars_obj = []
    files_obj = []
    pyfiles_obj = []
    remote_jars = []
    remote_files = []
    remote_pyfiles = []
    remote_file_protocol = ['s3a://', 'hdfs://']

    def pre_upload(path):
        # True -> local file that must be uploaded; False -> already remote.
        # A missing local file is fatal.
        protocol = path.split('//')[0] + '//'
        if protocol in remote_file_protocol:
            return False
        if not os.path.exists(path):
            fatal('File {} not exist'.format(path))
        return True

    # NOTE(review): file objects opened below are handed to kae.upload and
    # never explicitly closed — consider closing them after the uploads.
    if pre_upload(data['mainfile']):
        mainfile_obj = (('file', open(data['mainfile'], 'rb')), )
        data['mainApplicationFile'] = kae.upload(
            data['appname'], 'mainfile', mainfile_obj)['data']['path']
    else:
        data['mainApplicationFile'] = data['mainfile']

    for jar_path in data.get('jars', []):
        if pre_upload(jar_path):
            jars_obj.append(('file', open(jar_path, 'rb')))
        else:
            remote_jars.append(jar_path)
    for file_path in data.get('files', []):
        if pre_upload(file_path):
            files_obj.append(('file', open(file_path, 'rb')))
        else:
            remote_files.append(file_path)
    for pyfile_path in data.get('py-files', []):
        if pre_upload(pyfile_path):
            pyfiles_obj.append(('file', open(pyfile_path, 'rb')))
        else:
            remote_pyfiles.append(pyfile_path)

    data['deps']['jars'] = kae.upload(
        data['appname'], 'jars', jars_obj)['data']['path'] + remote_jars
    data['deps']['files'] = kae.upload(
        data['appname'], 'files', files_obj)['data']['path'] + remote_files
    data['deps']['pyFiles'] = kae.upload(
        data['appname'], 'pyfiles',
        pyfiles_obj)['data']['path'] + remote_pyfiles

    with handle_console_err():
        kae.create_sparkapp(data=data)
    click.echo(info('Create sparkapp done.'))