import os
import json
import datetime
import subprocess
from pathlib import Path

import ujson
from flask import request, jsonify, make_response
from sqlalchemy.orm import joinedload
from sqlalchemy.orm.attributes import flag_modified
from sqlalchemy.orm.exc import NoResultFound, MultipleResultsFound
from gitdb.exc import BadName

# Application-specific names (db_session, Project, CiCommit, Batch, Output,
# TestInput, LocalCommit, latest_successful_commit, get_commit_groups_paths,
# get_groups_path) come from this project's own modules, whose paths are not
# shown in this file.


def crud_milestones():
    data = request.get_json()
    project_id = data['project']
    try:
        project = Project.query.filter(Project.id == project_id).one()
    except NoResultFound:
        return 'ERROR: Project not found', 404
    milestones = project.data.get('milestones', {})
    if request.method == 'GET':
        return jsonify(milestones)
    # The body of HTTP DELETE requests can be dropped by proxies (e.g. uwsgi, nginx...),
    # so it is simpler to reuse the POST method.
    if request.method == 'DELETE' or data.get('delete') in ['true', True]:
        milestones.pop(data['key'], None)
    else:
        milestones[data['key']] = data['milestone']
    project.data['milestones'] = milestones
    flag_modified(project, "data")
    db_session.add(project)
    db_session.commit()
    print(f"UPDATE: Milestones {project_id}: {project.data['milestones']}")
    return jsonify(project.data['milestones'])
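
# A minimal client sketch for crud_milestones(). The host and route below are
# assumptions -- this file does not show how the handler is registered on the
# Flask app -- and the `requests` package is assumed to be available.
def example_set_milestone(host, project_id, key, milestone):
    import requests
    r = requests.post(
        f"{host}/api/v1/project/milestones",  # hypothetical route
        json={"project": project_id, "key": key, "milestone": milestone},
    )
    r.raise_for_status()
    return r.json()  # the updated milestones mapping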

def update_commit():
    try:
        commit = CiCommit.get_or_create(
            session=db_session,
            hexsha=request.json['git_commit_sha'],
            project_id=request.json['project'],
        )
    except Exception:
        return f"404 ERROR:\n ({request.json['project']}): There is an issue with your commit id ({request.json['git_commit_sha']})", 404
    if not commit.data:
        commit.data = {}
    commit_data = request.json.get('data', {})
    commit.data = {**commit.data, **commit_data}
    flag_modified(commit, "data")
    if commit.deleted:
        commit.deleted = False
    db_session.add(commit)
    db_session.commit()
    return jsonify({"status": "OK"})
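
# Sketch of the JSON payload update_commit() expects, reconstructed from the
# fields the handler reads above; the values are illustrative assumptions.
EXAMPLE_UPDATE_COMMIT_PAYLOAD = {
    "git_commit_sha": "abc123",          # commit to update
    "project": "group/my-project",       # project identifier
    "data": {"some_key": "some_value"},  # merged into CiCommit.data
}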

def update_batch():
    data = request.get_json()
    try:
        ci_commit = CiCommit.get_or_create(
            session=db_session,
            hexsha=data['git_commit_sha'],
            project_id=data['project'],
        )
    except Exception:
        return f"404 ERROR:\n ({data['project']}): There is an issue with your commit id ({data['git_commit_sha']})", 404
    batch = ci_commit.get_or_create_batch(data['batch_label'])
    if not batch.data:
        batch.data = {}
    batch_data = data.get('data', {})
    # Detect a new best iteration *before* merging: after the merge,
    # batch.data.get('best_iter') would always equal the incoming value.
    is_best = 'best_iter' in batch_data and batch_data['best_iter'] != batch.data.get('best_iter')
    batch.data = {**batch.data, **batch_data}

    command = data.get('command')
    if command:
        batch.data["commands"] = {**batch.data.get('commands', {}), **command}
    flag_modified(batch, "data")

    if is_best:
        # remove all non-optim_iteration results from the batch
        batch.outputs = [o for o in batch.outputs if o.output_type == 'optim_iteration']
        db_session.add(batch)
        db_session.commit()
        # make a copy of all the outputs in the best batch
        best_batch = ci_commit.get_or_create_batch(f"{data['batch_label']}|iter{batch_data.get('best_iter')}")
        for o in best_batch.outputs:
            o_copy = o.copy()
            o_copy.output_dir_override = str(o.output_dir)
            o_copy.batch = batch
            db_session.add(o_copy)

    db_session.add(batch)
    db_session.commit()
    return jsonify({"status": "OK"})
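
# Sketch of a payload for update_batch(), reconstructed from what the handler
# reads above; values are illustrative. Sending `best_iter` in `data` triggers
# the copy of the best iteration's outputs into the main batch.
EXAMPLE_UPDATE_BATCH_PAYLOAD = {
    "git_commit_sha": "abc123",
    "project": "group/my-project",
    "batch_label": "tuning-exploration",
    "command": {"0": "qa batch my-tests"},  # merged into batch.data["commands"]
    "data": {"best_iter": 3},               # marks iteration 3 as the new best
}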

def start_tuning(hexsha):
    """
    Request that we run extra tests for a given project.
    """
    project_id = request.args["project"]
    data = request.get_json()
    try:
        ci_commit = CiCommit.query.filter(
            CiCommit.project_id == project_id,
            CiCommit.hexsha.startswith(hexsha),
        ).one()
    except NoResultFound:
        return jsonify("Sorry, the commit id was not found"), 404
    if "qatools_config" not in ci_commit.project.data:
        return jsonify("Please configure `qatools` first"), 404

    ci_commit.latest_output_datetime = datetime.datetime.now()
    batch = ci_commit.get_or_create_batch(data['batch_label'])
    db_session.add(ci_commit)
    db_session.commit()

    if ci_commit.deleted:
        # Now that we updated latest_output_datetime, it won't be deleted again for a little while
        return jsonify(
            "Artifacts for this commit were deleted! Re-run your CI pipeline, or `git checkout / build / qa --ci save-artifacts`"
        ), 404

    groups_paths = [
        *get_commit_groups_paths(ci_commit.project, hexsha),
        get_groups_path(project_id),
    ]

    # We store in this directory the scripts used to run this new batch, as well as the logs.
    # We may instead want to use the folder where this batch's results are stored,
    # or even store the metadata in the database itself...
    prev_mask = os.umask(0)  # create the directory world-writable
    if not batch.output_dir.exists():
        batch.output_dir.mkdir(exist_ok=True, parents=True)
    os.umask(prev_mask)

    working_directory = ci_commit.commit_dir
    print(working_directory)

    # This will make us do automated tuning, versus a single manual batch
    do_optimize = data['tuning_search']['search_type'] == 'optimize'
    if do_optimize:
        # We write the optimization search configuration somewhere:
        # it needs to be accessed from LSF, so we can't use temporary files...
        config_path = batch.output_dir / 'optim-config.yaml'
        config_option = f"--config-file '{config_path}'"
        with config_path.open("w") as f:
            f.write(data['tuning_search']['parameter_search'])
    else:
        config_option = f"--tuning-search '{json.dumps(data['tuning_search'])}'"
    overwrite = "--action-on-existing run" if data["overwrite"] == "on" else "--action-on-existing sync"

    # FIXME: cd relative to main project
    batch_command = " ".join([
        "qa",
        f"--platform '{data['platform']}'" if "platform" in data else "",
        f"--label '{data['batch_label']}'",
        "optimize" if do_optimize else "batch",
        ' '.join([f'--batches-file "{p}"' for p in groups_paths]),
        f"--batch '{data['selected_group']}'",
        config_option,
        f"{overwrite} --no-wait" if not do_optimize else '',
        "\n",
    ])
    # print(batch_command)

    # To avoid issues with quoting, we write a script to run the batch,
    # and execute it with bsub/LSF.
    # We could also play with heredocs-within-heredocs, but it is painful, and this way we get logs.
    # openstf is our Android device farm
    use_openstf = data["android_device"].lower() == "openstf"
    parents_including_cwd = [*list(reversed(list(working_directory.parents))), working_directory]
    envrcs = [f'source "{p}/.envrc"\n' for p in parents_including_cwd if (p / '.envrc').exists()]
    qa_batch_script = "".join([
        "#!/bin/bash\n",
        "set -xe\n\n",
        f'cd "{working_directory}";\n\n',
        ('\n'.join(envrcs) + '\n') if envrcs else "",
        # qa uses click, which hates non-utf8 locales
        'export LC_ALL=en_US.utf8;\n',
        'export LANG=en_US.utf8;\n\n',
        # we avoid DISPLAY issues with matplotlib, since we're headless here
        'export MPLBACKEND=agg;\n',
        # https://unix.stackexchange.com/questions/115129/why-does-root-not-have-usr-local-in-path
        # Those options are specific to android
        f"export RESERVED_ANDROID_DEVICE='{data['android_device']}';\n" if not use_openstf else "",
        "export OPENSTF_STORAGE_QUOTA=12;\n" if not use_openstf else "",
        # Make sure qatools doesn't complain about not being in a git repository and knows where to save results
        "\nexport CI=true;\n",
        f"export CI_COMMIT_SHA='{ci_commit.gitcommit.hexsha}';\n",
        f"export QATOOLS_CI_COMMIT_DIR='{ci_commit.commit_dir}';\n\n",
        batch_command,
    ])
    print(qa_batch_script)
    qa_batch_path = batch.output_dir / "qa_batch.sh"
    with qa_batch_path.open("w") as f:
        f.write(qa_batch_script)

    qatools_config = ci_commit.project.data["qatools_config"]
    lsf_config = qatools_config.get('runners', qatools_config).get("lsf", {})
    default_user = lsf_config.get('user')
    user = data.get('user', default_user)
    if not user:
        return jsonify("You must provide a user as whom to run the tuning experiment."), 403
    queue = lsf_config.get("fast_queue", lsf_config['queue'])

    start_script = "".join([
        "#!/bin/bash\n",
        "set -xe\n\n",
        f'mkdir -p "{batch.output_dir}"\n',
        f'bsub_su "{user}" -q "{queue}" ',
        '-sp 4000 ',  # highest priority for manual runs
        # LSF refuses to give us long-running jobs....
        # '-W 24:00 ' if do_optimize else '-sp 4000 ',
        f'-o "{batch.output_dir}/log.txt" << "EOF"\n',
        f'\tssh -o StrictHostKeyChecking=no -q {user}@{user}-vdi \'bash "{qa_batch_path}"\'',
        '\nEOF',
    ])
    print(start_script)
    start_path = batch.output_dir / "start.sh"
    with start_path.open("w") as f:
        f.write(start_script)

    # Wrap and execute the script that starts the batch
    cmd = " ".join([
        # there is only C.utf8 on our container, but it is not available on LSF
        "LC_ALL=en_US.utf8 LANG=en_US.utf8",
        "ssh",
        # quiet, to avoid the welcome banner
        "-q",
        # ask for, and force, a TTY, otherwise bsub->su will complain
        "-tt",
        # make sure we accept the server key during the first connection
        "-o StrictHostKeyChecking=no",
        # ispq is the only user that can use bsub_su, an alias for `sudo -i -u {0} {1:}`
        "-i /home/arthurf/.ssh/ispq.id_rsa",
        "ispq@ispq-vdi",
        f'\'bash "{start_path}"\'',
    ])
    print(cmd)
    try:
        out = subprocess.run(cmd, shell=True, encoding="utf-8",
                             stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        out.check_returncode()
        print(out.stdout)
    except subprocess.CalledProcessError:
        return jsonify({"error": str(out.stdout), "cmd": str(cmd)}), 500
    return jsonify({"cmd": str(cmd), "stdout": str(out.stdout)})

def new_output_webhook():
    """Updates the database when we get new results."""
    data = request.get_json()
    # We get a handle on the Commit object related to our new output
    try:
        ci_commit = CiCommit.get_or_create(
            session=db_session,
            hexsha=data['git_commit_sha'],
            project_id=data['project'],
        )
    except Exception:
        return jsonify({"error": f"Could not find your commit ({data['git_commit_sha']})."}), 404

    ci_commit.project.latest_output_datetime = datetime.datetime.utcnow()
    ci_commit.latest_output_datetime = datetime.datetime.utcnow()

    # We make sure the Test on which we ran exists in the database
    test_input_path = data.get('input_path')
    if not test_input_path:
        return jsonify({"error": "the input path was not provided"}), 400
    test_input = TestInput.get_or_create(
        db_session,
        path=test_input_path,
        database=data.get('database', ci_commit.project.database),
    )

    # We save the basic information about our result
    batch = ci_commit.get_or_create_batch(data['batch_label'])
    if not batch.data:
        batch.data = {}
    batch.data.update({"type": data['job_type']})

    if data.get('input_metadata'):
        test_input.data['metadata'] = data['input_metadata']
        flag_modified(test_input, "data")

    output = Output.get_or_create(
        db_session,
        batch=batch,
        platform=data['platform'],
        configuration=data['configuration'],
        extra_parameters=data['extra_parameters'],
        test_input=test_input,
    )
    output.output_type = data.get('input_type', '')
    # we can only trust CI outputs to run on the exact code from the commit
    output.data = data.get('data', {"ci": data['job_type'] == 'ci'})
    if output.deleted:
        output.deleted = False

    # We allow users to save their data in custom locations,
    # at the commit and output levels
    if Path(data.get('commit_ci_dir', ci_commit.commit_dir)) != ci_commit.commit_dir:
        ci_commit.commit_dir_override = data.get('commit_ci_dir')
    if Path(data.get('output_directory', output.output_dir)) != output.output_dir:
        output.output_dir_override = data.get('output_directory')

    # We update the output's status
    output.is_running = data.get('is_running', False)
    if output.is_running:
        output.is_pending = True
    else:
        output.is_pending = data.get('is_pending', False)

    # We save the output's metrics
    if not output.is_pending:
        metrics = data.get('metrics', {})
        output.metrics = metrics
        output.is_failed = data.get('is_failed', False) or metrics.get('is_failed')

    db_session.add(output)
    db_session.commit()
    return jsonify(output.to_dict())
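
# Sketch of the webhook payload new_output_webhook() consumes. The keys are
# grounded in the handler above; the values are illustrative assumptions.
EXAMPLE_NEW_OUTPUT_PAYLOAD = {
    "git_commit_sha": "abc123",
    "project": "group/my-project",
    "batch_label": "ci",
    "job_type": "ci",
    "input_path": "images/test-01.jpg",
    "platform": "linux",
    "configuration": "base",
    "extra_parameters": {},
    "is_running": False,
    "is_pending": False,
    "metrics": {"is_failed": False},
}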

def get_ci_commit(commit_id=None):
    project_id = request.args['project']
    if not commit_id:
        commit_id = request.args.get('commit', None)
    if not commit_id:
        try:
            project = Project.query.filter(Project.id == project_id).one()
            default_branch = project.data['qatools_config']['project']['reference_branch']
        except Exception:
            default_branch = 'master'
        branch = request.args.get('branch', default_branch)
        ci_commit = latest_successful_commit(db_session, project_id=project_id, branch=branch, batch_label=request.args.get('batch'))
        if not ci_commit:
            return jsonify({'error': f"Sorry, we can't find any commit with results for this project on {branch}."}), 404
    else:
        try:
            # we try a commit from git
            ci_commit = (db_session
                         .query(CiCommit)
                         .options(
                             joinedload(CiCommit.batches)
                             .joinedload(Batch.outputs)
                         )
                         .filter(
                             CiCommit.project_id == project_id,
                             CiCommit.hexsha.startswith(commit_id),
                         )
                         .one())
        except MultipleResultsFound:
            print(f'!!!!!!!!!!!!! Multiple results for commit {commit_id} @{project_id}')
            ci_commit = (db_session
                         .query(CiCommit)
                         .options(
                             joinedload(CiCommit.batches)
                             .joinedload(Batch.outputs)
                         )
                         .filter(
                             CiCommit.project_id == project_id,
                             CiCommit.hexsha.startswith(commit_id),
                         )
                         .first())
        except NoResultFound:
            try:
                # `project` was never fetched on this code path, so look it up before using its repo
                project = Project.query.filter(Project.id == project_id).one()
                commit = project.repo.commit(commit_id)
                ci_commit = CiCommit(commit, project=project)
                db_session.add(ci_commit)
                db_session.commit()
            except Exception:
                return jsonify({'error': 'Sorry, we could not find the commit in the cloned git repo.'}), 404
        except BadName:
            try:
                ci_commit = LocalCommit(commit_id)
            except Exception:
                return jsonify({'error': 'Sorry, we could not find the commit folder.'}), 404
        except Exception as e:
            # let Flask turn unexpected errors into a 500
            raise e

    # FIXME: we should add details about the outputs...
    # FIXME: how do we get the reference commit?
    batch = request.args.get('batch', None)
    with_batches = [batch] if batch else None  # by default we show all batches
    with_aggregation = json.loads(request.args.get('metrics', '{}'))
    response = make_response(ujson.dumps(ci_commit.to_dict(with_aggregation, with_batches=with_batches, with_outputs=True)))
    response.headers['Content-Type'] = 'application/json'
    return response
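
# Sketch of how a client might query get_ci_commit(). The route itself is not
# shown in this file, so the URL is an assumption; the query parameters
# (project, commit/branch, batch, metrics) are the ones read above.
def example_get_ci_commit(host, project_id, commit_id=None, branch=None):
    import requests  # assumed to be available client-side
    params = {"project": project_id}
    if commit_id:
        params["commit"] = commit_id
    if branch:
        params["branch"] = branch
    r = requests.get(f"{host}/api/v1/commit", params=params)  # hypothetical route
    r.raise_for_status()
    return r.json()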