def register_release(ctx, appname, tag, git, f, literal, force):
    """Register a release (app name + git tag) with the KAE console.

    Resolves ``appname`` and ``tag`` from the current repository when not
    supplied, requires a specs text (app.yaml contents) and a git remote
    URL, then calls the console API.  ``force`` is forwarded to the API
    so an existing release can be overwritten.

    Terminates the process via ``fatal`` on any missing prerequisite.
    """
    appname = get_appname(appname=appname)
    tag = get_git_tag(git_tag=tag)
    specs_text = get_specs_text(f, literal)
    if specs_text is None:
        errmsg = [
            "specs_text is required, please use one of the instructions to specify it.",
            # fixed typo: "coomand" -> "command"
            "1. specify --literal or -f in command line",
            "2. make the current workdir in the source code dir which contains app.yaml"
        ]
        fatal('\n'.join(errmsg))
    kae = ctx.obj['kae_api']
    # fall back to the git remote configured for this CLI session
    git = git or get_remote_url(remote=ctx.obj['remotename'])
    if not git:
        fatal("git url not found, please check repository or pass argument")
    branch = get_current_branch()
    with handle_console_err():
        kae.register_release(appname, tag, git, specs_text,
                             branch=branch, force=force)
    click.echo(info('Register %s %s %s done.' % (appname, tag, git)))
def pre_upload(path):
    """Decide whether *path* must be uploaded to the console.

    Returns ``False`` when the path already lives on a remote store
    (its ``scheme://`` prefix appears in ``remote_file_protocol``).
    For local paths, aborts via ``fatal`` when the file is missing,
    otherwise returns ``True``.
    """
    scheme = path.split('//')[0] + '//'
    if scheme in remote_file_protocol:
        # already remote -- nothing to upload
        return False
    if not os.path.exists(path):
        fatal('File {} not exist'.format(path))
    return True
def set_secret(ctx, appname, cluster, f, literal, replace):
    """Push secret key/value data for an app to the given cluster.

    The secret payload comes from ``--literal`` JSON text or a JSON
    file ``-f``; the file, when given, takes precedence.  Every key and
    value must be a string.  ``replace`` is forwarded to the API.
    """
    payload = None
    if literal:
        try:
            payload = json.loads(literal)
        except Exception:
            fatal(
                "can't load secret from literal, pls ensure it's a valid json")
    try:
        if f:
            with open(f, "r") as src:
                payload = json.load(src)
    except Exception:
        fatal("can't read secret data from {}".format(f))
    if payload is None:
        fatal("you must specify literal or filename")
    # validate the payload: secrets are flat string-to-string maps
    for key, val in payload.items():
        if not isinstance(val, str):
            fatal("value of secret must be string")
        if not isinstance(key, str):
            fatal("key of secret must be string")
    kae = ctx.obj['kae_api']
    kae.set_cluster(cluster)
    appname = get_appname(appname=appname)
    with handle_console_err():
        result = kae.set_secret(appname, payload, replace)
    click.echo(info(str(result)))
def create_job(ctx, jobname, git, f, literal):
    """Create a job on the KAE console from a specs text and a git repo.

    The git URL falls back to the remote configured for this session;
    aborts via ``fatal`` when no URL can be determined.
    """
    specs_text = get_specs_text(f, literal)
    kae = ctx.obj['kae_api']
    repo_url = git or get_remote_url(remote=ctx.obj['remotename'])
    if not repo_url:
        fatal("git url not found, please check repository or pass argument")
    current_branch = get_current_branch()
    with handle_console_err():
        kae.create_job(jobname=jobname, git=repo_url,
                       specs_text=specs_text, branch=current_branch)
    click.echo(info('Create job done.'))
def register_release(ctx, appname, tag, git, f, literal, force=False):
    """Register a release (app name + git tag) with the KAE console.

    Resolves ``appname`` and ``tag`` from the current repository when
    not supplied and requires a specs text (app.yaml contents).

    Fixes vs. the previous version:
    - guard against ``specs_text is None`` (previously ``None`` was sent
      straight to the API), matching the sibling implementation;
    - optional, backward-compatible ``force`` flag (default ``False``)
      forwarded to the API, matching the sibling implementation.
    """
    appname = get_appname(appname=appname)
    tag = get_git_tag(git_tag=tag)
    specs_text = get_specs_text(f, literal)
    if specs_text is None:
        errmsg = [
            "specs_text is required, please use one of the instructions to specify it.",
            "1. specify --literal or -f in command line",
            "2. make the current workdir in the source code dir which contains app.yaml"
        ]
        fatal('\n'.join(errmsg))
    kae = ctx.obj['kae_api']
    git = git or get_remote_url(remote=ctx.obj['remotename'])
    if not git:
        fatal("git url not found, please check repository or pass argument")
    branch = get_current_branch()
    with handle_console_err():
        kae.register_release(appname, tag, git, specs_text,
                             branch=branch, force=force)
    click.echo(info('Register %s %s %s done.' % (appname, tag, git)))
def set_config(ctx, appname, name, f, literal):
    """Set a named config blob for an app on the KAE console.

    The config content comes from ``--literal`` text or from file
    ``-f`` (literal wins when both are given).

    Bug fix: previously, when neither ``literal`` nor ``f`` was
    supplied, ``data`` was unbound and the call crashed with a
    ``NameError``; now it aborts with an explicit message, consistent
    with ``set_secret``.
    """
    kae = ctx.obj['kae_api']
    appname = get_appname(appname=appname)
    if literal:
        data = literal
    elif f:
        try:
            with open(f, "r") as fp:
                data = fp.read()
        except Exception:
            fatal("can't read config data from {}".format(f))
    else:
        fatal("you must specify literal or filename")
    with handle_console_err():
        d = kae.set_config(appname, name, data)
    ss = pprint.pformat(d)
    click.echo(info(ss))
def upload(ctx, appname, files, type):
    """Upload one or more local files to the console for *appname*.

    ``type`` (kept for interface compatibility despite shadowing the
    builtin) is the upload category understood by the API.

    Bug fix: the file handles created with ``open(f, 'rb')`` were never
    closed; they are now closed deterministically once the upload call
    returns (or fails).
    """
    upload_files = []
    opened = []
    kae = ctx.obj['kae_api']
    try:
        for f in files:
            if not os.path.exists(f):
                fatal('File {} not exist'.format(f))
            fp = open(f, 'rb')
            opened.append(fp)
            upload_files.append(('file', (f, fp)))
        with handle_console_err():
            res = kae.upload(appname, type, upload_files)
    finally:
        # always release the handles, even when fatal/upload raises
        for fp in opened:
            fp.close()
    if res['error']:
        fatal(res['error'])
    click.echo(info('upload successful'))
def build_local(appname, tag, f, literal, test):
    """Build local docker image(s) from the app's specs.

    The repo dir defaults to the cwd (or the dir containing ``-f``).
    The image tag falls back to the current git tag, then ``latest``.
    With ``test`` truthy, the builds listed under ``specs['test']``
    are used instead of the top-level builds.

    Bug fix: ``yaml.load`` without an explicit Loader is deprecated and
    unsafe on untrusted input; replaced with ``yaml.safe_load``.
    """
    repo_dir = os.getcwd()
    if f:
        repo_dir = os.path.dirname(os.path.abspath(f))
    appname = get_appname(cwd=repo_dir, appname=appname)
    tag = get_git_tag(cwd=repo_dir, git_tag=tag, required=False)
    if tag is None:
        tag = 'latest'
    specs_text = get_specs_text(f, literal)
    if specs_text is None:
        errmsg = [
            "specs_text is required, please use one of the instructions to specify it.",
            # fixed typo: "coomand" -> "command"
            "1. specify --literal or -f in command line",
            "2. make the current workdir in the source code dir which contains app.yaml"
        ]
        fatal('\n'.join(errmsg))
    try:
        # safe_load: specs come from user files; never full yaml.load
        yaml_dict = yaml.safe_load(specs_text)
    except yaml.YAMLError as e:
        fatal('specs text is invalid yaml {}'.format(str(e)))
    try:
        specs = app_specs_schema.load(yaml_dict).data
    except Exception as e:
        fatal('specs text is invalid: {}'.format(str(e)))
    builds = specs["builds"]
    if test:
        if "test" not in specs:
            fatal("no test specified in app.yaml")
        builds = specs['test']['builds']
    if len(builds) == 0:
        fatal("no builds found")
    for build in builds:
        # per-build tag wins over the repo-level tag
        image_tag = build.tag if build.tag else tag
        dockerfile = build.get('dockerfile', None)
        if dockerfile is None:
            dockerfile = os.path.join(repo_dir, "Dockerfile")
        full_image_name = "{}:{}".format(build.name, image_tag)
        build_image(full_image_name, repo_dir, None, dockerfile=dockerfile)
def delete_sparkapp(ctx, appname, f):
    """Delete a spark application by name, or by the name found in a
    yaml file given with ``-f`` (the file's ``appname`` wins).
    """
    kae = ctx.obj['kae_api']
    if f:
        # ``fatal`` terminates, so these act as guard clauses
        if not os.path.exists(f):
            fatal('The yaml file not exists!')
        data = read_yaml_file(f)
        if not data:
            fatal('yaml error')
        appname = data['appname']
    with handle_console_err():
        result = kae.delete_sparkapp(appname)
    if result['error']:
        click.echo(error(result['error']))
    else:
        click.echo(
            info('Delete spark application {} successfully'.format(appname)))
def create_sparkapp(ctx, mainfile, arguments, f, appname, apptype, schedule,
                    concurrency_policy, image, pythonversion, conf,
                    sparkversion, mode, jars, files, py_files, packages,
                    repositories, driver_memory, driver_cores,
                    executor_memory, executor_cores, number_executors,
                    selector, comment):
    """Create a (scheduled) spark application on the KAE console.

    The request payload ``data`` is built either from a yaml file
    (``-f``) or from the individual command-line options, then local
    dependency files (mainfile, jars, files, py-files) are uploaded to
    the console; paths already on a remote store (s3a/hdfs) are passed
    through untouched.

    NOTE(review): the source arrived with its indentation collapsed;
    the branch structure below is a reconstruction — in particular,
    whether the sparkConf/selector/schedule/jars stanzas belong inside
    the command-line ``else`` branch should be confirmed against the
    original file.  Several params (sparkversion, mode, packages,
    repositories) are accepted but unused in the visible code.
    """
    kae = ctx.obj['kae_api']
    data = {}
    required = ['appname', 'image', 'mainfile']
    if f:
        # payload from a yaml file; fill in default driver/executor specs
        if not os.path.exists(f):
            fatal('The yaml file not exists!')
        else:
            data = read_yaml_file(f)
            if not data:
                fatal('yaml error')
            else:
                if not data.get('driver'):
                    data['driver'] = {'cpu': 1, 'memory': '512m'}
                if not data.get('executor'):
                    data['executor'] = {
                        'cpu': 1,
                        'memory': '512m',
                        'instances': 1
                    }
    else:
        # payload assembled from command-line options
        data = {
            'appname': appname,
            'apptype': apptype,
            'image': image,
            'pythonVersion': pythonversion,
            'driver': {
                'cpu': driver_cores,
                'memory': driver_memory,
            },
            'executor': {
                'cpu': executor_cores,
                'memory': executor_memory,
                'instances': number_executors
            },
            'mainfile': mainfile,
            'comment': comment or ''
        }
        # --conf key=value pairs -> sparkConf mapping
        sparkConf = {}
        for item in conf:
            k, v = item.split('=')
            sparkConf[k] = v
        if sparkConf:
            data['sparkConf'] = sparkConf
        # --selector key=value pairs -> nodeSelector mapping
        nodeSelector = {}
        for item in selector:
            k, v = item.split('=')
            nodeSelector[k] = v
        if nodeSelector:
            data['nodeSelector'] = nodeSelector
        if arguments:
            data['arguments'] = list(arguments)
        # scheduled apps additionally require a cron-style schedule
        if data['apptype'] == 'scheduledsparkapplication':
            if not schedule:
                fatal('Scheduledsparkapplication must should spec `shedule`')
            else:
                data['schedule'] = schedule
                data['concurrencyPolicy'] = concurrency_policy
        # comma-separated dependency lists from the command line
        if jars:
            data['jars'] = jars.split(',')
        if files:
            data['files'] = files.split(',')
        if py_files:
            data['py-files'] = py_files.split(',')
    for require_key in required:
        if require_key not in data.keys():
            fatal('Miss required argument {}'.format(require_key))
    data.setdefault('deps', {})
    jars_obj = []
    files_obj = []
    pyfiles_obj = []
    remote_jars = []
    remote_files = []
    remote_pyfiles = []
    # file_type_map = {
    #     'mainfile': 'mainApplicationFile',
    #     'jars': 'jars',
    #     'files': 'files',
    #     'py-files': 'pyFiles'
    # }
    remote_file_protocol = ['s3a://', 'hdfs://']

    def pre_upload(path):
        # True -> local file that must be uploaded; False -> already remote.
        # Aborts via fatal() when a local path does not exist.
        protocol = path.split('//')[0] + '//'
        if protocol in remote_file_protocol:
            return False
        if not os.path.exists(path):
            fatal('File {} not exist'.format(path))
        return True

    # main application file: upload if local, keep the remote URL otherwise
    if pre_upload(data['mainfile']):
        mainfile_obj = (('file', open(data['mainfile'], 'rb')), )
        data['mainApplicationFile'] = kae.upload(data['appname'], 'mainfile',
                                                 mainfile_obj)['data']['path']
    else:
        data['mainApplicationFile'] = data['mainfile']
    # split each dependency list into local handles to upload vs remote URLs
    for jar_path in data.get('jars', []):
        if pre_upload(jar_path):
            jars_obj.append(('file', open(jar_path, 'rb')))
        else:
            remote_jars.append(jar_path)
    for file_path in data.get('files', []):
        if pre_upload(file_path):
            files_obj.append(('file', open(file_path, 'rb')))
        else:
            remote_files.append(file_path)
    for pyfile_path in data.get('py-files', []):
        if pre_upload(pyfile_path):
            pyfiles_obj.append(('file', open(pyfile_path, 'rb')))
        else:
            remote_pyfiles.append(pyfile_path)
    # NOTE(review): kae.upload(...)['data']['path'] is concatenated with a
    # list here, so it presumably returns a list of uploaded paths — confirm
    # against the API client.
    data['deps']['jars'] = kae.upload(data['appname'], 'jars',
                                      jars_obj)['data']['path'] + remote_jars
    data['deps']['files'] = kae.upload(
        data['appname'], 'files', files_obj)['data']['path'] + remote_files
    data['deps']['pyFiles'] = kae.upload(
        data['appname'], 'pyfiles',
        pyfiles_obj)['data']['path'] + remote_pyfiles
    with handle_console_err():
        kae.create_sparkapp(data=data)
    click.echo(info('Create sparkapp done.'))