def delete(ctx, include, exclude, glob, yes, suite_type, **kwargs):
    """
    delete data defined in suite config files

    Loads the suites selected by include/exclude/glob (benchmark.json or
    testsuite.json depending on *suite_type*), lists their datasets, asks for
    confirmation once, then deletes each suite's data through the client.
    """
    # Merge remaining CLI options into the shared context object.
    ctx.obj.update(**kwargs)
    ctx.obj.post_process()
    namespace = ctx.obj["namespace"]
    config_inst = ctx.obj["config"]
    echo.echo(f"testsuite namespace: {namespace}", fg='red')
    echo.echo("loading testsuites:")
    suffix = "benchmark.json" if suite_type == "benchmark" else "testsuite.json"
    suites = _load_testsuites(includes=include, excludes=exclude, glob=glob,
                              suffix=suffix, suite_type=suite_type)
    for suite in suites:
        echo.echo(f"\tdataset({len(suite.dataset)}) {suite.path}")
    # Fix: the original prompted "running?" twice (once before listing the
    # suites and once after); a single confirmation after the listing is
    # enough and lets the user see what will be deleted before confirming.
    if not yes and not click.confirm("running?"):
        return
    with Clients(config_inst) as client:
        for suite in suites:
            _delete_data(client, suite)
    echo.farewell()
    echo.echo(f"testsuite namespace: {namespace}", fg='red')
def run_benchmark(ctx, include, exclude, glob, skip_data, tol, clean_data, storage_tag, history_tag, match_details, **kwargs):
    """
    process benchmark suite, alias: bq

    Loads *benchmark.json suites selected by include/exclude/glob, optionally
    uploads their datasets, runs the benchmark pairs of each suite, and —
    when clean_data is set — deletes the uploaded data afterwards.
    Per-suite failures are logged and do not stop the remaining suites.
    """
    # Merge remaining CLI options into the shared context, then let it
    # finish its own setup (post_process presumably derives/validates
    # options — TODO confirm against the context object's implementation).
    ctx.obj.update(**kwargs)
    ctx.obj.post_process()
    namespace = ctx.obj["namespace"]
    config_inst = ctx.obj["config"]
    config_inst.extend_sid = ctx.obj["extend_sid"]
    config_inst.auto_increasing_sid = ctx.obj["auto_increasing_sid"]
    # CLI flag wins; otherwise fall back to the config file's default.
    if clean_data is None:
        clean_data = config_inst.clean_data
    data_namespace_mangling = ctx.obj["namespace_mangling"]
    yes = ctx.obj["yes"]
    echo.welcome("benchmark")
    echo.echo(f"testsuite namespace: {namespace}", fg='red')
    echo.echo("loading testsuites:")
    suites = _load_testsuites(includes=include, excludes=exclude, glob=glob,
                              suffix="benchmark.json", suite_type="benchmark")
    # Preview what will run so the user can confirm with full information.
    for suite in suites:
        echo.echo(f"\tdataset({len(suite.dataset)}) benchmark groups({len(suite.pairs)}) {suite.path}")
    if not yes and not click.confirm("running?"):
        return
    with Clients(config_inst) as client:
        # Version is queried once from the guest_0 client and reused for
        # every suite's result records.
        fate_version = client["guest_0"].get_version()
        for i, suite in enumerate(suites):
            # noinspection PyBroadException
            try:
                start = time.time()
                echo.echo(f"[{i + 1}/{len(suites)}]start at {time.strftime('%Y-%m-%d %X')} {suite.path}", fg='red')
                if not skip_data:
                    try:
                        _upload_data(client, suite, config_inst)
                    except Exception as e:
                        # Wrap so the outer handler logs which suite failed.
                        raise RuntimeError(f"exception occur while uploading data for {suite.path}") from e
                try:
                    _run_benchmark_pairs(config_inst, suite, tol, namespace,
                                         data_namespace_mangling, storage_tag,
                                         history_tag, fate_version, match_details)
                except Exception as e:
                    raise RuntimeError(f"exception occur while running benchmark jobs for {suite.path}") from e
                # Only clean up data we actually uploaded in this run.
                if not skip_data and clean_data:
                    _delete_data(client, suite)
                echo.echo(f"[{i + 1}/{len(suites)}]elapse {timedelta(seconds=int(time.time() - start))}", fg='red')
            except Exception:
                # Tag the failure with a unique id so console output and the
                # log file can be correlated; continue with the next suite.
                exception_id = uuid.uuid1()
                echo.echo(f"exception in {suite.path}, exception_id={exception_id}", err=True, fg='red')
                LOGGER.exception(f"exception id: {exception_id}")
            finally:
                echo.stdout_newline()
    echo.farewell()
    echo.echo(f"testsuite namespace: {namespace}", fg='red')
def upload(ctx, include, exclude, glob, suite_type, role, config_type, **kwargs):
    """
    upload data defined in suite config files

    Two modes: when *include* is non-empty, loads the selected suites
    (optionally filtering datasets by *role*) and uploads via client_upload;
    otherwise uploads the dataset list from a bundled JSON config chosen by
    *config_type* ('min_test' or the all-examples config).
    """
    # Merge remaining CLI options into the shared context object.
    ctx.obj.update(**kwargs)
    ctx.obj.post_process()
    namespace = ctx.obj["namespace"]
    config_inst = ctx.obj["config"]
    config_inst.extend_sid = ctx.obj["extend_sid"]
    config_inst.auto_increasing_sid = ctx.obj["auto_increasing_sid"]
    yes = ctx.obj["yes"]
    echo.welcome()
    echo.echo(f"testsuite namespace: {namespace}", fg='red')
    if len(include) != 0:
        # Suite mode: upload the data declared by the selected suite files.
        echo.echo("loading testsuites:")
        suffix = "benchmark.json" if suite_type == "benchmark" else "testsuite.json"
        suites = _load_testsuites(includes=include, excludes=exclude, glob=glob,
                                  suffix=suffix, suite_type=suite_type)
        for suite in suites:
            if role != "all":
                # Keep only datasets whose role pattern matches the requested
                # role (d.role_str is treated as a regex — TODO confirm).
                suite.dataset = [
                    d for d in suite.dataset if re.match(d.role_str, role)
                ]
            echo.echo(f"\tdataset({len(suite.dataset)}) {suite.path}")
        if not yes and not click.confirm("running?"):
            return
        client_upload(suites=suites, config_inst=config_inst, namespace=namespace)
    else:
        # Config mode: no suites given, upload a predefined dataset list.
        config = get_config(config_inst)
        if config_type == 'min_test':
            config_file = config.min_test_data_config
        else:
            config_file = config.all_examples_data_config
        with open(config_file, 'r', encoding='utf-8') as f:
            upload_data = json.loads(f.read())
        echo.echo(f"\tdataset({len(upload_data['data'])}) {config_file}")
        if not yes and not click.confirm("running?"):
            return
        with Clients(config_inst) as client:
            data_upload(client, config_inst, upload_data)
        echo.farewell()
        echo.echo(f"testsuite namespace: {namespace}", fg='red')
def _config(ctx, **kwargs):
    """
    check connection

    Parses the configured file, then probes every known role's client and
    prints one line per role: OK (with address and fate version) or failure
    (with the exception's args).
    """
    ctx.obj.update(**kwargs)
    parsed_conf = parse_config(ctx.obj.get("config"))
    with Clients(parsed_conf) as clients:
        for r in clients.all_roles():
            try:
                version, address = clients[r].check_connection()
            except Exception as e:
                click.echo(
                    f"[X]connection fail, role is {r}, exception is {e.args}")
                continue
            click.echo(
                f"[✓]connection {address} ok, fate version is {version}, role is {r}"
            )
def query_schema(ctx, component_name, job_id, role, party_id, **kwargs):
    """
    query the meta of the output data of a component

    Confirms with the user (unless --yes), then asks the client layer for
    the output-data metadata of the given component/job/role/party.
    """
    ctx.obj.update(**kwargs)
    ctx.obj.post_process()
    config_inst = ctx.obj["config"]
    namespace = ctx.obj["namespace"]
    yes = ctx.obj["yes"]
    echo.welcome()
    echo.echo(f"testsuite namespace: {namespace}", fg='red')
    # Skip the prompt entirely when --yes was given.
    if not (yes or click.confirm("running?")):
        return
    with Clients(config_inst) as clients:
        query_component_output_data(clients, config_inst, component_name,
                                    job_id, role, party_id)
    echo.farewell()
    echo.echo(f"testsuite namespace: {namespace}", fg='red')
def upload(ctx, include, exclude, glob, suite_type, role, **kwargs):
    """
    upload data defined in suite config files

    Loads the suites selected by include/exclude/glob, optionally filters
    each suite's datasets by *role*, confirms, then uploads every suite's
    data. Failures are logged per suite and do not stop the remaining ones.
    """
    ctx.obj.update(**kwargs)
    ctx.obj.post_process()
    namespace = ctx.obj["namespace"]
    config_inst = ctx.obj["config"]
    yes = ctx.obj["yes"]
    echo.echo(f"testsuite namespace: {namespace}", fg='red')
    echo.echo("loading testsuites:")
    if suite_type == "benchmark":
        suffix = "benchmark.json"
    else:
        suffix = "testsuite.json"
    suites = _load_testsuites(includes=include, excludes=exclude, glob=glob,
                              suffix=suffix, suite_type=suite_type)
    for suite in suites:
        if role != "all":
            suite.dataset = [d for d in suite.dataset if re.match(d.role_str, role)]
        echo.echo(f"\tdataset({len(suite.dataset)}) {suite.path}")
    if not (yes or click.confirm("running?")):
        return
    with Clients(config_inst) as client:
        total = len(suites)
        for idx, suite in enumerate(suites):
            # noinspection PyBroadException
            try:
                echo.echo(f"[{idx + 1}/{total}]start at {time.strftime('%Y-%m-%d %X')} {suite.path}", fg='red')
                try:
                    _upload_data(client, suite, config_inst)
                except Exception as e:
                    raise RuntimeError(f"exception occur while uploading data for {suite.path}") from e
            except Exception:
                # Unique id correlates console output with the log file.
                exception_id = uuid.uuid1()
                echo.echo(f"exception in {suite.path}, exception_id={exception_id}")
                LOGGER.exception(f"exception id: {exception_id}")
            finally:
                echo.stdout_newline()
    echo.farewell()
    echo.echo(f"testsuite namespace: {namespace}", fg='red')
def client_upload(suites, config_inst, namespace, output_path=None):
    """Upload each suite's datasets over one shared client session.

    Iterates the suites in order, uploading each one's data; a failure is
    logged with a unique exception id and the loop continues with the next
    suite. Prints the farewell banner and namespace when done.
    """
    with Clients(config_inst) as client:
        n_suites = len(suites)
        for seq, suite in enumerate(suites, start=1):
            # noinspection PyBroadException
            try:
                echo.echo(
                    f"[{seq}/{n_suites}]start at {time.strftime('%Y-%m-%d %X')} {suite.path}",
                    fg='red')
                try:
                    _upload_data(client, suite, config_inst, output_path)
                except Exception as e:
                    raise RuntimeError(
                        f"exception occur while uploading data for {suite.path}"
                    ) from e
            except Exception:
                # Unique id correlates console output with the log file.
                exception_id = uuid.uuid1()
                echo.echo(
                    f"exception in {suite.path}, exception_id={exception_id}")
                LOGGER.exception(f"exception id: {exception_id}")
            finally:
                echo.stdout_newline()
    echo.farewell()
    echo.echo(f"testsuite namespace: {namespace}", fg='red')
def run_suite(ctx, replace, include, exclude, glob, skip_dsl_jobs, skip_pipeline_jobs, skip_data, data_only, clean_data, **kwargs):
    """
    process testsuite

    Loads the testsuites selected by include/exclude/glob and, per suite:
    uploads data (unless skipped), submits DSL jobs and/or pipeline jobs
    (each individually skippable), optionally cleans up the uploaded data,
    and prints a summary. Per-suite failures are logged and do not stop
    the remaining suites.
    """
    # Merge remaining CLI options into the shared context object.
    ctx.obj.update(**kwargs)
    ctx.obj.post_process()
    config_inst = ctx.obj["config"]
    # CLI flag wins; otherwise fall back to the config file's default.
    if clean_data is None:
        clean_data = config_inst.clean_data
    namespace = ctx.obj["namespace"]
    yes = ctx.obj["yes"]
    data_namespace_mangling = ctx.obj["namespace_mangling"]
    # prepare output dir and json hooks
    _add_replace_hook(replace)
    echo.welcome()
    echo.echo(f"testsuite namespace: {namespace}", fg='red')
    echo.echo("loading testsuites:")
    suites = _load_testsuites(includes=include, excludes=exclude, glob=glob)
    # Preview what will run so the user can confirm with full information.
    for suite in suites:
        echo.echo(f"\tdataset({len(suite.dataset)}) dsl jobs({len(suite.jobs)}) "
                  f"pipeline jobs ({len(suite.pipeline_jobs)}) {suite.path}")
    if not yes and not click.confirm("running?"):
        return
    echo.stdout_newline()
    with Clients(config_inst) as client:
        for i, suite in enumerate(suites):
            # noinspection PyBroadException
            try:
                start = time.time()
                echo.echo(f"[{i + 1}/{len(suites)}]start at {time.strftime('%Y-%m-%d %X')} {suite.path}", fg='red')
                if not skip_data:
                    try:
                        _upload_data(client, suite, config_inst)
                    except Exception as e:
                        # Wrap so the outer handler logs which suite failed.
                        raise RuntimeError(f"exception occur while uploading data for {suite.path}") from e
                # --data-only: stop after upload, move to the next suite.
                if data_only:
                    continue
                if not skip_dsl_jobs:
                    echo.stdout_newline()
                    try:
                        _submit_job(client, suite, namespace, config_inst)
                    except Exception as e:
                        raise RuntimeError(f"exception occur while submit job for {suite.path}") from e
                if not skip_pipeline_jobs:
                    try:
                        _run_pipeline_jobs(config_inst, suite, namespace, data_namespace_mangling)
                    except Exception as e:
                        raise RuntimeError(f"exception occur while running pipeline jobs for {suite.path}") from e
                # Only clean up data we actually uploaded in this run.
                if not skip_data and clean_data:
                    _delete_data(client, suite)
                echo.echo(f"[{i + 1}/{len(suites)}]elapse {timedelta(seconds=int(time.time() - start))}", fg='red')
                # A summary only makes sense if at least one job kind ran.
                if not skip_dsl_jobs or not skip_pipeline_jobs:
                    echo.echo(suite.pretty_final_summary(), fg='red')
            except Exception:
                # Unique id correlates console output with the log file;
                # continue with the next suite.
                exception_id = uuid.uuid1()
                echo.echo(f"exception in {suite.path}, exception_id={exception_id}")
                LOGGER.exception(f"exception id: {exception_id}")
            finally:
                echo.stdout_newline()
    echo.farewell()
    echo.echo(f"testsuite namespace: {namespace}", fg='red')
def run_task(ctx, job_type, include, replace, timeout, update_job_parameters, update_component_parameters, max_iter, max_depth, num_trees, task_cores, storage_tag, history_tag, skip_data, clean_data, provider, **kwargs):
    """
    Test the performance of big data tasks, alias: bp

    When no --include is given, falls back to the *testsuite.json templates
    under the configured perf_template_dir for the given job_type. For each
    suite: uploads data (unless skipped), submits the job with the tuning
    parameters, runs pipeline jobs, optionally cleans up, and prints a
    summary including the measured time consumption.
    """
    # Merge remaining CLI options into the shared context object.
    ctx.obj.update(**kwargs)
    ctx.obj.post_process()
    config_inst = ctx.obj["config"]
    config_inst.extend_sid = ctx.obj["extend_sid"]
    config_inst.auto_increasing_sid = ctx.obj["auto_increasing_sid"]
    namespace = ctx.obj["namespace"]
    yes = ctx.obj["yes"]
    data_namespace_mangling = ctx.obj["namespace_mangling"]
    # CLI flag wins; otherwise fall back to the config file's default.
    if clean_data is None:
        clean_data = config_inst.clean_data

    def get_perf_template(conf: Config, job_type):
        # Default suite templates live under <perf_template_dir>/<job_type>/.
        perf_dir = os.path.join(
            os.path.abspath(conf.perf_template_dir) + '/' + job_type + '/' + "*testsuite.json")
        return glob.glob(perf_dir)

    if not include:
        include = get_perf_template(config_inst, job_type)
    # prepare output dir and json hooks
    _add_replace_hook(replace)
    echo.welcome()
    echo.echo(f"testsuite namespace: {namespace}", fg='red')
    echo.echo("loading testsuites:")
    suites = _load_testsuites(includes=include, excludes=tuple(), glob=None,
                              provider=provider)
    # Preview what will run so the user can confirm with full information.
    for i, suite in enumerate(suites):
        echo.echo(
            f"\tdataset({len(suite.dataset)}) dsl jobs({len(suite.jobs)}) {suite.path}"
        )
    if not yes and not click.confirm("running?"):
        return
    echo.stdout_newline()
    with Clients(config_inst) as client:
        for i, suite in enumerate(suites):
            # noinspection PyBroadException
            try:
                start = time.time()
                echo.echo(
                    f"[{i + 1}/{len(suites)}]start at {time.strftime('%Y-%m-%d %X')} {suite.path}",
                    fg='red')
                if not skip_data:
                    try:
                        _upload_data(client, suite, config_inst)
                    except Exception as e:
                        # Wrap so the outer handler logs which suite failed.
                        raise RuntimeError(
                            f"exception occur while uploading data for {suite.path}"
                        ) from e
                echo.stdout_newline()
                try:
                    # _submit_job returns the measured time consumption used
                    # in the final summary below — TODO confirm exact shape.
                    time_consuming = _submit_job(
                        client, suite, namespace, config_inst, timeout,
                        update_job_parameters, storage_tag, history_tag,
                        update_component_parameters, max_iter, max_depth,
                        num_trees, task_cores)
                except Exception as e:
                    raise RuntimeError(
                        f"exception occur while submit job for {suite.path}"
                    ) from e
                try:
                    _run_pipeline_jobs(config_inst, suite, namespace,
                                       data_namespace_mangling)
                except Exception as e:
                    raise RuntimeError(
                        f"exception occur while running pipeline jobs for {suite.path}"
                    ) from e
                echo.echo(
                    f"[{i + 1}/{len(suites)}]elapse {timedelta(seconds=int(time.time() - start))}",
                    fg='red')
                # Only clean up data we actually uploaded in this run.
                if not skip_data and clean_data:
                    _delete_data(client, suite)
                echo.echo(suite.pretty_final_summary(time_consuming), fg='red')
            except Exception:
                # Unique id correlates console output with the log file;
                # continue with the next suite.
                exception_id = uuid.uuid1()
                echo.echo(
                    f"exception in {suite.path}, exception_id={exception_id}")
                LOGGER.exception(f"exception id: {exception_id}")
            finally:
                echo.stdout_newline()
    echo.farewell()
    echo.echo(f"testsuite namespace: {namespace}", fg='red')