import glob
import os
import time
import uuid
from datetime import timedelta

import click

# Clients, Config, echo, LOGGER and the helpers used below (_add_replace_hook,
# _load_testsuites, _upload_data, _submit_job, _run_pipeline_jobs, _delete_data)
# are defined elsewhere in this module; the click option decorators for the two
# commands are likewise omitted from this excerpt.


def run_suite(ctx, replace, include, exclude, glob, skip_dsl_jobs, skip_pipeline_jobs,
              skip_data, data_only, clean_data, **kwargs):
    """process testsuite"""
    ctx.obj.update(**kwargs)
    ctx.obj.post_process()
    config_inst = ctx.obj["config"]
    if clean_data is None:
        clean_data = config_inst.clean_data
    namespace = ctx.obj["namespace"]
    yes = ctx.obj["yes"]
    data_namespace_mangling = ctx.obj["namespace_mangling"]
    # prepare output dir and json hooks
    _add_replace_hook(replace)
    echo.welcome()
    echo.echo(f"testsuite namespace: {namespace}", fg='red')
    echo.echo("loading testsuites:")
    suites = _load_testsuites(includes=include, excludes=exclude, glob=glob)
    for suite in suites:
        echo.echo(f"\tdataset({len(suite.dataset)}) dsl jobs({len(suite.jobs)}) "
                  f"pipeline jobs({len(suite.pipeline_jobs)}) {suite.path}")
    if not yes and not click.confirm("running?"):
        return
    echo.stdout_newline()

    with Clients(config_inst) as client:
        for i, suite in enumerate(suites):
            # noinspection PyBroadException
            try:
                start = time.time()
                echo.echo(f"[{i + 1}/{len(suites)}]start at {time.strftime('%Y-%m-%d %X')} {suite.path}",
                          fg='red')
                if not skip_data:
                    try:
                        _upload_data(client, suite, config_inst)
                    except Exception as e:
                        raise RuntimeError(f"exception occurred while uploading data for {suite.path}") from e
                if data_only:
                    continue
                if not skip_dsl_jobs:
                    echo.stdout_newline()
                    try:
                        _submit_job(client, suite, namespace, config_inst)
                    except Exception as e:
                        raise RuntimeError(f"exception occurred while submitting job for {suite.path}") from e
                if not skip_pipeline_jobs:
                    try:
                        _run_pipeline_jobs(config_inst, suite, namespace, data_namespace_mangling)
                    except Exception as e:
                        raise RuntimeError(f"exception occurred while running pipeline jobs for {suite.path}") from e
                if not skip_data and clean_data:
                    _delete_data(client, suite)
                echo.echo(f"[{i + 1}/{len(suites)}]elapse {timedelta(seconds=int(time.time() - start))}",
                          fg='red')
                if not skip_dsl_jobs or not skip_pipeline_jobs:
                    echo.echo(suite.pretty_final_summary(), fg='red')
            except Exception:
                # isolate failures: a broken suite must not abort the remaining suites
                exception_id = uuid.uuid1()
                echo.echo(f"exception in {suite.path}, exception_id={exception_id}")
                LOGGER.exception(f"exception id: {exception_id}")
            finally:
                echo.stdout_newline()

    echo.farewell()
    echo.echo(f"testsuite namespace: {namespace}", fg='red')
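
# The per-suite try/except/finally above isolates failures: one broken suite
# cannot abort the rest of the run, and each failure gets a uuid that ties the
# short console message to the full traceback in the log. A minimal standalone
# sketch of the same pattern, with a hypothetical `work` callable standing in
# for the upload/submit/run steps:
def _demo_failure_isolation(tasks, work):
    """Run `work` over `tasks`, logging failures without stopping the loop."""
    results = []
    for task in tasks:
        try:
            results.append(work(task))
        except Exception:
            # console gets a short correlation id; the log keeps the traceback
            exception_id = uuid.uuid1()
            print(f"exception in task {task!r}, exception_id={exception_id}")
            LOGGER.exception(f"exception id: {exception_id}")
    return results
# e.g. _demo_failure_isolation([1, 0, 4], lambda t: 1 / t) logs one
# ZeroDivisionError and still returns the other two results: [1.0, 0.25].
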
def run_task(ctx, job_type, include, replace, timeout, update_job_parameters,
             update_component_parameters, max_iter, max_depth, num_trees, task_cores,
             storage_tag, history_tag, skip_data, clean_data, provider, **kwargs):
    """Test the performance of big data tasks, alias: bp"""
    ctx.obj.update(**kwargs)
    ctx.obj.post_process()
    config_inst = ctx.obj["config"]
    config_inst.extend_sid = ctx.obj["extend_sid"]
    config_inst.auto_increasing_sid = ctx.obj["auto_increasing_sid"]
    namespace = ctx.obj["namespace"]
    yes = ctx.obj["yes"]
    data_namespace_mangling = ctx.obj["namespace_mangling"]
    if clean_data is None:
        clean_data = config_inst.clean_data

    def get_perf_template(conf: Config, job_type):
        # collect every "*testsuite.json" under <perf_template_dir>/<job_type>/
        perf_dir = os.path.join(os.path.abspath(conf.perf_template_dir), job_type, "*testsuite.json")
        return glob.glob(perf_dir)

    if not include:
        include = get_perf_template(config_inst, job_type)
    # prepare output dir and json hooks
    _add_replace_hook(replace)
    echo.welcome()
    echo.echo(f"testsuite namespace: {namespace}", fg='red')
    echo.echo("loading testsuites:")
    suites = _load_testsuites(includes=include, excludes=tuple(), glob=None, provider=provider)
    for suite in suites:
        echo.echo(f"\tdataset({len(suite.dataset)}) dsl jobs({len(suite.jobs)}) {suite.path}")
    if not yes and not click.confirm("running?"):
        return
    echo.stdout_newline()

    with Clients(config_inst) as client:
        for i, suite in enumerate(suites):
            # noinspection PyBroadException
            try:
                start = time.time()
                echo.echo(f"[{i + 1}/{len(suites)}]start at {time.strftime('%Y-%m-%d %X')} {suite.path}",
                          fg='red')
                if not skip_data:
                    try:
                        _upload_data(client, suite, config_inst)
                    except Exception as e:
                        raise RuntimeError(f"exception occurred while uploading data for {suite.path}") from e
                echo.stdout_newline()
                try:
                    time_consuming = _submit_job(
                        client, suite, namespace, config_inst, timeout, update_job_parameters,
                        storage_tag, history_tag, update_component_parameters, max_iter,
                        max_depth, num_trees, task_cores)
                except Exception as e:
                    raise RuntimeError(f"exception occurred while submitting job for {suite.path}") from e
                try:
                    _run_pipeline_jobs(config_inst, suite, namespace, data_namespace_mangling)
                except Exception as e:
                    raise RuntimeError(f"exception occurred while running pipeline jobs for {suite.path}") from e
                echo.echo(f"[{i + 1}/{len(suites)}]elapse {timedelta(seconds=int(time.time() - start))}",
                          fg='red')
                if not skip_data and clean_data:
                    _delete_data(client, suite)
                echo.echo(suite.pretty_final_summary(time_consuming), fg='red')
            except Exception:
                exception_id = uuid.uuid1()
                echo.echo(f"exception in {suite.path}, exception_id={exception_id}")
                LOGGER.exception(f"exception id: {exception_id}")
            finally:
                echo.stdout_newline()

    echo.farewell()
    echo.echo(f"testsuite namespace: {namespace}", fg='red')
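
# get_perf_template above discovers performance templates by globbing for
# "*testsuite.json" under <perf_template_dir>/<job_type>/. A minimal standalone
# sketch of the same lookup; the directory layout built here is hypothetical
# and exists only for illustration:
def _demo_template_lookup():
    import tempfile

    root = tempfile.mkdtemp()
    for job_type, name in [("lr", "perf-lr-testsuite.json"), ("sbt", "perf-sbt-testsuite.json")]:
        os.makedirs(os.path.join(root, job_type), exist_ok=True)
        open(os.path.join(root, job_type, name), "w").close()

    # same pattern construction as get_perf_template
    pattern = os.path.join(os.path.abspath(root), "lr", "*testsuite.json")
    return glob.glob(pattern)  # -> ['<root>/lr/perf-lr-testsuite.json']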