Example #1
def crud_milestones():
  data = request.get_json()
  project_id = data['project']
  try:
    project = Project.query.filter(Project.id == project_id).one()
  except NoResultFound:
    return 'ERROR: Project not found', 404
  milestones = project.data.get('milestones', {})
  
  if request.method == 'GET':
    return jsonify(milestones)

  # The body of HTTP DELETE requests can be dropped by proxies (e.g. uWSGI, nginx...),
  # so it's simpler to reuse the POST method...
  if request.method == 'DELETE' or data.get('delete') in ['true', True]:
    milestones.pop(data['key'], None)  # tolerate double-deletes
  else:
    milestones[data['key']] = data['milestone']

  project.data['milestones'] = milestones
  flag_modified(project, "data")
  db_session.add(project)
  db_session.commit()
  print(f"UPDATE: Milestones {project_id}: {project.data['milestones']}")
  return jsonify(project.data['milestones'])
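A minimal sketch of how a client could drive this view with the requests library (the URL and the payload values are assumptions; only the keys the handler reads come from the code above):

import requests

URL = "https://qaboard.example.com/api/v1/project/milestones"  # hypothetical route

# Create or update a milestone
requests.post(URL, json={
    "project": "my-group/my-project",
    "key": "2024-06-01",
    "milestone": {"label": "v1.0 release candidate"},
})

# Delete a milestone via POST, since DELETE bodies can be dropped by proxies
requests.post(URL, json={
    "project": "my-group/my-project",
    "key": "2024-06-01",
    "delete": True,
})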
Example #2
def crud_output(output_id):
  output = Output.query.filter(Output.id == output_id).one_or_none()
  if output is None:
    return {"error": "Output not found"}, 404
  if request.method == 'GET':
    return jsonify(output.to_dict())
  if request.method == 'DELETE':
    if output.is_pending:
      return {"error": "Please wait for the Output to finish running before deleting it"}, 500
    output.delete(soft=False)
    db_session.delete(output)
    db_session.commit()
    return {"status": "OK"}
Example #3
def update_batch():
    data = request.get_json()
    try:
        ci_commit = CiCommit.get_or_create(
            session=db_session,
            hexsha=request.json['git_commit_sha'],
            project_id=request.json['project'],
        )
    except Exception:
        return f"404 ERROR:\n ({request.json['project']}): There is an issue with your commit id ({request.json['git_commit_sha']})", 404

    batch = ci_commit.get_or_create_batch(data['batch_label'])
    if not batch.data:
        batch.data = {}
    batch_data = request.json.get('data', {})
    batch.data = {**batch.data, **batch_data}

    command = request.json.get('command')
    if command:
        batch.data["commands"] = {**batch.data.get('commands', {}), **command}
        flag_modified(batch, "data")

    is_best = 'best_iter' in batch_data and batch_data['best_iter'] != batch.data.get('best_iter')
    if is_best:
        # remove all non-optim_iteration results from the batch
        batch.outputs = [
            o for o in batch.outputs if o.output_type == 'optim_iteration'
        ]
        db_session.add(batch)
        db_session.commit()
        # make copy of all outputs in the best batch
        best_batch = ci_commit.get_or_create_batch(
            f"{data['batch_label']}|iter{batch_data.get('best_iter')}")
        for o in best_batch.outputs:
            o_copy = o.copy()
            o_copy.output_dir_override = str(o.output_dir)
            o_copy.batch = batch
            db_session.add(o_copy)

    db_session.add(batch)
    db_session.commit()
    return jsonify({"status": "OK"})
Example #4
def delete():
    # Deletes the session with given id (including json file)

    session_id = request.args.get('sid', None)
    if session_id is None or session_id == '':
        return make_response(jsonify({'errors': True, 'msg': 'No session id provided'}), 400)

    username = "******"
    user_id = 0
    if g.user.is_authenticated:
        username = g.user.username
        user_id = g.user.id

    if not user_id and not app.config['ALLOW_UNAUTHENTICATED_USER_DELETE']:
        # Unauthenticated users can't delete sessions
        return make_response(jsonify({'errors': True, 'msg': 'Unauthenticated user'}), 401)

    instance = Session.query.filter_by(id=session_id).first()
    if not instance:
        return make_response(jsonify({'errors': True, 'msg': 'Session not found'}), 400)

    if user_id != instance.user_id:
        return make_response(jsonify({'errors': True, 'msg': 'Unauthorized user'}), 401)

    # Do delete... (read the attributes we still need before the commit expires the instance)
    name = instance.name
    owner_id = instance.user_id
    db_session.delete(instance)
    db_session.commit()
    file_path = '%s/%s/%s.json' % (app.config['SESSIONS_FOLDER_PATH'], owner_id, session_id)
    try:
        os.remove(file_path)
    except OSError:
        # No problem if the file does not exist
        pass

    # Compute currently available sessions for the user (so that the frontend does not need
    # to make an extra request to update the panel)
    user_sessions = get_available_sessions_for_user(user_id, username)
    demo_sessions = get_availavle_demo_sessions()

    return make_response(jsonify({'errors': False, 'name': name, 'id': session_id,
        'userSessions': user_sessions, 'demoSessions': demo_sessions }), 200)
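A sketch of the corresponding client call (the URL is an assumption; the handler only reads the sid query parameter plus the authenticated user):

import requests

resp = requests.delete(
    "https://example.com/api/sessions/delete",  # hypothetical route
    params={"sid": "8c5a0f3e-2f6d-4a76-9b1e-2f0f2cd0a111"},
)
body = resp.json()
if not body["errors"]:
    print("Deleted session:", body["name"])
    print("Remaining sessions:", len(body["userSessions"]))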
Example #5
def commit_save_artifacts():
  hexsha = request.json.get('hexsha')
  # Note: building the query doesn't hit the database yet,
  # so this except only guards against malformed requests
  try:
    ci_commits = (db_session
                  .query(CiCommit)
                  .filter(CiCommit.hexsha == hexsha))
  except Exception:
    return f"404 ERROR:\n ({request.json['project']}): There is an issue with your commit id ({hexsha})", 404
  for ci_commit in ci_commits.all():
    if not request.json['project'].startswith(ci_commit.project_id):
      print(f'skip {ci_commit.project_id}')
      continue
    print(f"[save-artifacts] {ci_commit}")
    # FIXME: in the clean crontab we remove commits without runs
    # if we rely on artifacts from a subproject without runs, it will cause issues... 
    # we should use the git info to find the qatools.yaml
    ci_commit.save_artifacts()
    ci_commit.deleted = False
    db_session.add(ci_commit)
    db_session.commit()
  return 'OK'
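A sketch of the request this endpoint expects (the URL is an assumption; the handler reads hexsha and project from the JSON body, and project may point at a subproject since it is matched with startswith):

import requests

requests.post(
    "https://qaboard.example.com/api/v1/commit/save-artifacts",  # hypothetical route
    json={
        "hexsha": "abc123def456",
        "project": "my-group/my-project/subproject",
    },
)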
Example #6
def start_tuning(hexsha):
    """
    Request that we run extra tests for a given project.
    """
    project_id = request.args["project"]
    data = request.get_json()

    try:
        ci_commit = CiCommit.query.filter(
            CiCommit.project_id == project_id,
            CiCommit.hexsha.startswith(hexsha)
        ).one()
    except NoResultFound:
        return jsonify("Sorry, the commit id was not found"), 404

    if "qatools_config" not in ci_commit.project.data:
        return jsonify("Please create `qaboard.yaml`"), 404

    ci_commit.latest_output_datetime = datetime.datetime.now()
    batch = ci_commit.get_or_create_batch(data['batch_label'])
    db_session.add(ci_commit)
    db_session.commit()

    if ci_commit.deleted:
        # Now that we updated the latest_output_datetime, it won't be deleted again for a little while
        return jsonify("Artifacts for this commit were deleted! Re-run your CI pipeline, or `git checkout / build / qa --ci save-artifacts`"), 404


    batches_paths = [*get_commit_batches_paths(ci_commit.project, hexsha), get_groups_path(project_id)]
    # We store in this directory the scripts used to run this new batch, as well as the logs
    # We may instead want to use the folder where this batch's results are stored
    # Or even store the metadata in the database itself...
    prev_mask = os.umask(0)
    batch.batch_dir.mkdir(exist_ok=True, parents=True)
    os.umask(prev_mask)


    working_directory = ci_commit.artifacts_dir
    print(working_directory)

    # This will make us do automated tuning, versus a single manual batch
    do_optimize = data['tuning_search']['search_type'] == 'optimize'
    if do_optimize:
        # we write the optimization search configuration to disk:
        # it needs to be accessible from LSF, so we can't use temporary files...
        config_path = batch.batch_dir / 'optim-config.yaml'
        config_option = f"--config-file '{config_path}'"
        with config_path.open("w") as f:
            f.write(data['tuning_search']['parameter_search'])
    else:
        config_option = f"--tuning-search '{json.dumps(data['tuning_search'])}'"

    overwrite = "--action-on-existing run" if data["overwrite"] in ("on", True) else "--action-on-existing sync"
    batch_command = " ".join([
        "qa",
        f"--platform '{data['platform']}'" if "platform" in data else "",
        f"--label '{data['batch_label']}'",
        "optimize" if do_optimize else "batch",
        ' '.join([f'--batches-file "{p}"' for p in batches_paths]),
        f"--batch '{data['selected_group']}'",
        # f"--runner=local", # uncomment if testing from Samsung SIRC where LSF is the default
        config_option,
        f"{overwrite} --no-wait" if not do_optimize else '',
    ])
    print(batch_command)

    # To avoid issues with quoting, we write a script to run the batch,
    # and execute it with bsub/LSF
    # We could also play with heredocs-within-heredocs, but it is painful, and this way we get logs
    # openstf is our Android device farm
    use_openstf = data["android_device"].lower() == "openstf"
    parent_including_cwd = [*list(reversed(list(working_directory.parents))), working_directory]
    envrcs = [f'source "{p}/.envrc"\n' for p in parent_including_cwd if (p / '.envrc').exists()]
    qa_batch_script = "\n".join([
        "#!/bin/bash",
        # qa uses click, which hates non-utf8 locales
        'export LC_ALL=C.UTF-8',
        'export LANG=C.UTF-8',
        "",
        # Avoid common DISPLAY issues with matplotlib, since we're headless here
        'export MPLBACKEND=agg',
        # Load all .envrc files relevant for the (sub)project
        ('\n'.join(envrcs) + '\n') if envrcs else "",
        "set -xe",
        f'cd "{working_directory}"',
        "",
        "",
        # Make sure QA-Board doesn't complain about not being in a git repository,
        # and knows where to save results
        "export CI=true",
        "export QABOARD_TUNING=true",
        f"export GIT_COMMIT='{ci_commit.hexsha}'",
        f"export QA_OUTPUTS_COMMIT='{ci_commit.commit_dir}'",
        "",
        batch_command,
        "",
    ])
    print(qa_batch_script)
    qa_batch_path = batch.batch_dir / "qa_batch.sh"
    with qa_batch_path.open("w") as f:
        f.write(qa_batch_script)

    batch.output_dir.mkdir(parents=True, exist_ok=True)
    cmd = ['bash', '-c', f'"{qa_batch_path}" &> "{batch.batch_dir}/log.txt"']
    print(cmd)
    try:
        out = subprocess.run(cmd, encoding='utf-8', stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        out.check_returncode()
    except subprocess.CalledProcessError:
        return jsonify({"error": str(out.stdout), "cmd": str(cmd)}), 500
    return jsonify({"cmd": str(cmd), "stdout": str(out.stdout)})
Example #7
def api_ci_commit(commit_id=None):
    # TODO: clean...
    if request.method == 'POST':
        try:
            commit = CiCommit.get_or_create(
                session=db_session,
                hexsha=request.json['git_commit_sha']
                if not commit_id else commit_id,
                project_id=request.json['project'],
            )
        except Exception:
            return f"404 ERROR:\n ({request.json['project']}): There is an issue with your commit id ({request.json['git_commit_sha']})", 404
        if not commit.data:
            commit.data = {}
        commit_data = request.json.get('data', {})
        commit.data = {**commit.data, **commit_data}
        flag_modified(commit, "data")
        if commit.deleted:
            commit.deleted = False
        db_session.add(commit)
        db_session.commit()
        return jsonify({"status": "OK"})

    project_id = request.args['project']
    if not commit_id:
        commit_id = request.args.get('commit', None)

    if not commit_id:
        try:
            project = Project.query.filter(Project.id == project_id).one()
            default_branch = project.data['qatools_config']['project'][
                'reference_branch']
        except Exception:
            default_branch = 'master'
        branch = request.args.get('branch', default_branch)
        ci_commit = latest_successful_commit(
            db_session,
            project_id=project_id,
            branch=branch,
            batch_label=request.args.get('batch'))
        if not ci_commit:
            return jsonify({
                'error':
                f"Sorry, we can't find any commit with results for this project on {branch}."
            }), 404
    else:
        try:  # we try a commit from git
            ci_commit = (db_session.query(CiCommit).options(
                joinedload(CiCommit.batches).joinedload(Batch.outputs)).filter(
                    CiCommit.project_id == project_id,
                    CiCommit.hexsha.startswith(commit_id),
                ).one())
        except MultipleResultsFound:
            print(
                f'!!!!!!!!!!!!! Multiple results for commit {commit_id} @{project_id}'
            )
            ci_commit = (db_session.query(CiCommit).options(
                joinedload(CiCommit.batches).joinedload(Batch.outputs)).filter(
                    CiCommit.project_id == project_id,
                    CiCommit.hexsha.startswith(commit_id),
                ).first())
        except NoResultFound:
            try:
                project = Project.query.filter(Project.id == project_id).one()
                try:
                    commit = project.repo.commit(commit_id)
                except Exception:
                    try:
                        commit = project.repo.refs[commit_id].commit
                    except Exception:
                        commit = project.repo.tags[commit_id].commit
                ci_commit = CiCommit(commit, project=project)
                db_session.add(ci_commit)
                db_session.commit()
            except Exception:
                return jsonify({
                    'error':
                    'Sorry, we could not find the commit in the cloned git repo.'
                }), 404
        except BadName:
            return jsonify({
                'error':
                f'Sorry, we could not understand the commit ID {commit_id}.'
            }), 404
        except Exception:
            raise  # TODO: return a 500 ({'error': 'Sorry, the request failed.'}) instead of re-raising
        # FIXME: we should add details about the outputs...
        # FIXME: how do we get the reference commit?

    batch = request.args.get('batch', None)
    with_batches = [batch] if batch else None  # by default we show all batches
    with_aggregation = json.loads(request.args.get('metrics', '{}'))
    response = make_response(
        ujson.dumps(
            ci_commit.to_dict(with_aggregation,
                              with_batches=with_batches,
                              with_outputs=True)))
    response.headers['Content-Type'] = 'application/json'
    return response
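For the GET side, a sketch of the query parameters the handler reads (the route URL is an assumption):

import requests

r = requests.get(
    "https://qaboard.example.com/api/v1/commit",  # hypothetical route
    params={
        "project": "my-group/my-project",
        "commit": "abc123",              # prefix-matched against the commit sha
        "batch": "default",              # omit to return all batches
        "metrics": '{}',                 # JSON-encoded aggregation spec
    },
)
commit = r.json()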
Example #8
def start_tuning(hexsha):
    """
    Request that we run extra tests for a given project.
    """
    project_id = request.args["project"]
    data = request.get_json()

    try:
        ci_commit = CiCommit.query.filter(
            CiCommit.project_id == project_id,
            CiCommit.hexsha.startswith(hexsha)
        ).one()
    except NoResultFound:
        return jsonify("Sorry, the commit id was not found"), 404

    if "qatools_config" not in ci_commit.project.data:
        return jsonify("Please create `qaboard.yaml`"), 404

    ci_commit.latest_output_datetime = datetime.datetime.now()
    batch = ci_commit.get_or_create_batch(data['batch_label'])
    db_session.add(ci_commit)
    db_session.commit()

    if ci_commit.deleted:
        # Now that we updated the latest_output_datetime, it won't be deleted again for a little while
        return jsonify("Artifacts for this commit were deleted! Re-run your CI pipeline, or `git checkout / build / qa --ci save-artifacts`"), 404


    batches_paths = [*get_commit_batches_paths(ci_commit.project, hexsha), get_groups_path(project_id)]
    # We store in this directory the scripts used to run this new batch, as well as the logs
    # We may instead want to use the folder where this batch's results are stored
    # Or even store the metadata in the database itself...
    prev_mask = os.umask(0)
    batch.output_dir.mkdir(exist_ok=True, parents=True)
    os.umask(prev_mask)


    working_directory = ci_commit.commit_dir
    print(working_directory)

    # This will make us do automated tuning, versus a single manual batch
    do_optimize = data['tuning_search']['search_type'] == 'optimize'
    if do_optimize:
        # we write the optimization search configuration to disk:
        # it needs to be accessible from LSF, so we can't use temporary files...
        config_path = batch.output_dir / 'optim-config.yaml'
        config_option = f"--config-file '{config_path}'"
        with config_path.open("w") as f:
            f.write(data['tuning_search']['parameter_search'])
    else:
        config_option = f"--tuning-search '{json.dumps(data['tuning_search'])}'"

    overwrite = "--action-on-existing run" if data["overwrite"] == "on" else "--action-on-existing sync"
    # FIXME: cd relative to main project
    batch_command = " ".join(
        [
            "qa",
            f"--platform '{data['platform']}'" if "platform" in data else "",
            f"--label '{data['batch_label']}'",
            "optimize" if do_optimize else "batch",
            ' '.join([f'--batches-file "{p}"' for p in batches_paths]),
            f"--batch '{data['selected_group']}'",
            config_option,
            f"{overwrite} --no-wait" if not do_optimize else '',
            "\n",
        ]
    )
    # print(batch_command)

    # To avoid issues with quoting, we write a script to run the batch,
    # and execute it with bsub/LSF
    # We could also play with heredocs-within-heredocs, but it is painful, and this way we get logs
    # openstf is our Android device farm
    use_openstf = data["android_device"].lower() == "openstf"
    parent_including_cwd = [*list(reversed(list(working_directory.parents))), working_directory]
    envrcs = [f'source "{p}/.envrc"\n' for p in parent_including_cwd if (p / '.envrc').exists()]
    qa_batch_script = "".join(
        [
            "#!/bin/bash\n",
            "set -xe\n\n",
            f'cd "{working_directory}";\n\n',
            ('\n'.join(envrcs) + '\n') if envrcs else "",
            # qa uses click, which hates non-utf8 locales
            'export LC_ALL=en_US.utf8;\n',
            'export LANG=en_US.utf8;\n\n',
            # we avoid DISPLAY issues with matplotlib, since we're headless here
            'export MPLBACKEND=agg;\n',

            f"export RESERVED_ANDROID_DEVICE='{data['android_device']}';\n" if not use_openstf else "",
            # https://unix.stackexchange.com/questions/115129/why-does-root-not-have-usr-local-in-path
            # Those options are specific to android
            f"export RESERVED_ANDROID_DEVICE='{data['android_device']}';\n" if not use_openstf else "",
            f"export OPENSTF_STORAGE_QUOTA=12;\n" if not use_openstf else "",

            # Make sure qatools doesn't complain about not being in a git repository and knows where to save results
            f"\nexport CI=true;\n",
            f"export CI_COMMIT_SHA='{ci_commit.hexsha}';\n",
            f"export QA_CI_CI_COMMIT_DIR='{ci_commit.commit_dir}';\n\n",
            batch_command,
        ]
    )
    print(qa_batch_script)
    qa_batch_path = batch.output_dir / "qa_batch.sh"
    with qa_batch_path.open("w") as f:
        f.write(qa_batch_script)

    qatools_config = ci_commit.project.data["qatools_config"]
    lsf_config = qatools_config.get('runners', qatools_config).get("lsf", {})
    default_user = lsf_config.get('user')
    user = data.get('user', default_user)
    if not user:
        return jsonify("You must provide a user as whom to run the tuning experiment."), 403

    queue = lsf_config.get("fast_queue", lsf_config['queue'])
    start_script = "".join(
        [
            "#!/bin/bash\n",
            "set -xe\n\n",
            f'mkdir -p "{batch.output_dir}"\n',
            f'bsub_su "{user}" -q "{queue}" ',
            '-sp 4000 ', # highest priority for manual runs
            ## LSF refuses to give us long-running jobs....
            ## '-W 24:00 ' if do_optimize else '-sp 4000 ', # highest priority for manual runs
            f'-o "{batch.output_dir}/log.txt" << "EOF"\n',
            f'\tssh -o StrictHostKeyChecking=no -q {user}@{user}-vdi \'bash "{qa_batch_path}"\'',
            '\nEOF'
        ]
    )
    print(start_script)


    start_path = batch.output_dir / "start.sh"
    with start_path.open("w") as f:
        f.write(start_script)

    # Wraps and execute the script that starts the batch
    cmd = " ".join(
        [
            # there is only C.utf8 on our container, but it is not available on LSF
            "LC_ALL=en_US.utf8 LANG=en_US.utf8",
            "ssh",
            # quiet to avoid the welcome banner
            "-q",
            # ask, and force a TTY, otherwise bsub->su will complain
            "-tt",
            # make sure we OK the server key during the first-connection
            "-o StrictHostKeyChecking=no",
            # ispq is the only user that can use bsub_su, an alias for sudo -i -u {0} {1:}.
            "-i /home/arthurf/.ssh/ispq.id_rsa",
            "ispq@ispq-vdi",
            f'\'bash "{start_path}"\'',
        ]
    )
    print(cmd)

    try:
        out = subprocess.run(cmd, shell=True, encoding="utf-8", stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        out.check_returncode()
        print(out.stdout)
    except subprocess.CalledProcessError:
        return jsonify({"error": str(out.stdout), "cmd": str(cmd)}), 500
    return jsonify({"cmd": str(cmd), "stdout": str(out.stdout)})
Example #9
def new_output_webhook():
    """Updates the database when we get new results."""
    data = request.get_json()

    # We get a handle on the Commit object related to our new output
    try:
        ci_commit = CiCommit.get_or_create(
            session=db_session,
            hexsha=data['git_commit_sha'],
            project_id=data['project'],
        )
    except Exception:
        return jsonify({
            "error":
            f"Could not find your commit ({data['git_commit_sha']})."
        }), 404

    ci_commit.project.latest_output_datetime = datetime.datetime.utcnow()
    ci_commit.latest_output_datetime = datetime.datetime.utcnow()

    # We make sure the Test on which we ran exists in the database
    test_input_path = data.get('input_path')
    if not test_input_path:
        return jsonify({"error": "the input path was not provided"}, 400)
    test_input = TestInput.get_or_create(
        db_session,
        path=test_input_path,
        database=data.get('database', ci_commit.project.database),
    )

    # We save the basic information about our result
    batch = ci_commit.get_or_create_batch(data['batch_label'])
    if not batch.data:
        batch.data = {}
    batch.data.update({"type": data['job_type']})
    if data.get('input_metadata'):
        test_input.data['metadata'] = data['input_metadata']
        flag_modified(test_input, "data")

    platform = data['platform']
    # if platform == 'lsf':
    #   platform = 'linux'
    # elif platform == 'windows':
    #   platform = 'win32'

    configurations = deserialize_config(
        data['configuration']
    ) if 'configuration' in data else data['configurations']
    output = Output.get_or_create(
        db_session,
        batch=batch,
        platform=platform,
        configurations=configurations,
        extra_parameters=data['extra_parameters'],
        test_input=test_input,
    )
    output.output_type = data.get('input_type', '')

    output.data = data.get('data', {})
    # we can only trust CI outputs to run on the exact code from the commit
    output.data["ci"] = data['job_type'] == 'ci'
    if output.deleted:
        output.deleted = False

    # We allow users to save their data in custom locations
    # at the commit and output levels
    if Path(data.get('commit_ci_dir', ci_commit.commit_dir)).resolve() != Path(
            ci_commit.commit_dir):
        ci_commit.commit_dir_override = data.get('commit_ci_dir')
    if Path(data.get('output_directory',
                     output.output_dir)) != output.output_dir:
        output.output_dir_override = data.get('output_directory')

    # We update the output's status
    output.is_running = data.get('is_running', False)
    if output.is_running:
        output.is_pending = True
    else:
        output.is_pending = data.get('is_pending', False)

    # We save the output's metrics
    if not output.is_pending:
        metrics = data.get('metrics', {})
        output.metrics = metrics
        output.is_failed = data.get('is_failed',
                                    False) or metrics.get('is_failed')

    db_session.add(output)
    db_session.commit()
    return jsonify(output.to_dict())
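A sketch of a webhook payload (the URL and values are illustrative; every key below is read by new_output_webhook):

import requests

payload = {
    "git_commit_sha": "abc123def456",
    "project": "my-group/my-project",
    "batch_label": "ci",
    "job_type": "ci",                  # output.data["ci"] is derived from this
    "platform": "linux",
    "input_path": "images/test-chart.raw",
    "database": "/mnt/databases/default",
    "configurations": ["base", {"gain": 2}],
    "extra_parameters": {},
    "is_running": False,
    "is_pending": False,
    "metrics": {"psnr": 32.5, "is_failed": False},
}
r = requests.post("https://qaboard.example.com/api/v1/output", json=payload)  # hypothetical route
print(r.json())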
Example #10
def save():
    username = "******"
    user_id = 0
    if g.user.is_authenticated:
        username = g.user.username
        user_id = g.user.id

    if user_id == 0 and not app.config['ALLOW_UNAUTHENTICATED_USER_SAVE_LOAD']:
        return make_response(jsonify({'errors': True, 'msg': 'Unauthenticated user'}), 401)

    try:
        data = json.loads(request.data)
    except ValueError:
        return make_response(jsonify({'errors': True, 'msg': 'No posted data or posted data not readable'}), 400)

    # Create object in db
    session_id = request.args.get('sid', None)
    if not is_valid_uuid(session_id):
        session_id = None  # Session id comes from demo session, we create new version with new id
    if session_id is not None:
        instance = Session.query.filter_by(id=session_id).first()
        if not instance:
            return make_response(jsonify({'errors': True, 'msg': 'Session not found'}), 400)
        # If user is not the same as the one that created the session, add a new id and update
        # existing user id (this will effectively create a new session object)
        if user_id != instance.user_id:
            session_id = str(uuid.uuid4())
            instance.id = session_id
            instance.user_id = user_id
    else:
        # Should create a new session
        session_id = str(uuid.uuid4())
        instance = Session(id=session_id, user_id=user_id)
        instance.created = datetime.datetime.utcnow()

    data['session']['author'] = username
    if data['session']['name']:
        session_name = data['session']['name']
    else:
        n_existing_sessions = Session.query.filter_by(user_id=user_id).count()
        session_name = 'Untitled session #%i' % (n_existing_sessions + 1)
        data['session']['name'] = session_name
    instance.name = session_name
    instance.last_modified = datetime.datetime.utcnow()
    db_session.add(instance)
    db_session.commit()

    file_dir = '%s/%i' % (app.config['SESSIONS_FOLDER_PATH'], user_id)
    file_path = '%s/%s.json' % (file_dir, session_id)
    if os.path.exists(file_path):
        with open(file_path, 'r') as f:
            file_contents = json.load(f)
    else:
        file_contents = {
            'id': session_id,
            'username': username,
            'data': [],
            'created': datetime.datetime.now().isoformat(),
        }
    file_contents['lastModified'] = datetime.datetime.now().isoformat()
    file_contents['data'] = data

    # Save session file
    if not os.path.exists(file_dir):
        os.mkdir(file_dir)
    with open(file_path, 'w') as f:
        json.dump(file_contents, f, indent=4)


    return make_response(jsonify(
        {'errors': False, 'sessionName': session_name, 'sessionID': session_id }), 200)
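The is_valid_uuid helper is not shown in these examples; a plausible implementation (an assumption, not the project's actual code) would be:

import uuid

def is_valid_uuid(value):
    # Accept only canonical UUID strings, rejecting None and non-UUID input
    try:
        return str(uuid.UUID(value)) == value
    except (TypeError, ValueError, AttributeError):
        return False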
Example #11
def update_batch():
    data = request.get_json()
    try:
        ci_commit = CiCommit.get_or_create(
            session=db_session,
            hexsha=request.json['git_commit_sha'],
            project_id=request.json['project'],
            data=data,
        )
    except Exception:
        return f"404 ERROR:\n ({request.json['project']}): There is an issue with your commit id ({request.json['git_commit_sha']})", 404

    batch = ci_commit.get_or_create_batch(data['batch_label'])
    # prefix_output_dir for backward-compatibility
    batch.batch_dir_override = data.get("batch_dir",
                                        data.get("prefix_output_dir"))

    # Clients can store any metadata in each batch.
    # Currently it's used by `qa optimize` to store info on iterations
    if not batch.data:
        batch.data = {}
    batch_data = request.json.get('data', {})
    # And each batch can have changes vs its commit's config and metrics.
    # The use case is usually working locally with `qa --share` and
    # seeing updated visualizations and metrics.
    if "qaboard_config" in data and data["qaboard_config"] != ci_commit.data[
            "qatools_config"]:
        batch.data["config"] = data["qaboard_config"]
    if "qaboard_metrics" in data and data["qaboard_metrics"] != ci_commit.data[
            "qatools_metrics"]:
        batch.data["qatools_metrics"] = data["qaboard_metrics"]
    batch.data = {**batch.data, **batch_data}

    # Save info on each "qa batch" command in the batch, mainly to list them in logs
    command = request.json.get('command')
    if command:
        batch.data["commands"] = {**batch.data.get('commands', {}), **command}
        flag_modified(batch, "data")

    # It's a `qa optimize` experiment
    if batch_data.get('optimization'):
        if 'best_iter' in batch_data:
            # we will save the outputs from the best iteration in the batch,
            # so first we need to remove any previous best results
            for o in batch.outputs:
                if o.output_type != 'optim_iteration':
                    o.delete(soft=False)
            db_session.add(batch)
            db_session.commit()
            # Move results from the best iteration into this batch
            best_iteration_label = batch_data['last_iteration_label']
            best_batch = ci_commit.get_or_create_batch(best_iteration_label)
            for o in best_batch.outputs:
                o.output_dir_override = str(o.output_dir)
                o.batch = batch
                db_session.add(o)
            db_session.commit()

            for b in ci_commit.batches:
                if b.label.startswith(f"{data['batch_label']}|iter") and b.label != batch_data['last_iteration_label']:
                    print(f'Deleting previous iteration {b.label}')
                    db_session.delete(b)

    db_session.add(batch)
    db_session.commit()
    return jsonify({"status": "OK", "id": batch.id})
Example #12
def new_output_webhook():
    """Updates the database when we get new results."""
    data = request.get_json()
    hexsha = data.get('commit_sha', data['git_commit_sha'])
    # We get a handle on the Commit object related to our new output
    try:
        ci_commit = CiCommit.get_or_create(
            session=db_session,
            hexsha=hexsha,
            project_id=data['project'],
            data=data,
        )
    except Exception as e:
        return jsonify({
            "error":
            f"Could not find your commit ({hexsha}). {e}"
        }), 404

    ci_commit.project.latest_output_datetime = datetime.datetime.utcnow()
    ci_commit.latest_output_datetime = datetime.datetime.utcnow()

    # We make sure the Test on which we ran exists in the database
    test_input_path = data.get('rel_input_path', data.get('input_path'))
    if not test_input_path:
        return jsonify({"error": "the input path was not provided"}), 400
    test_input = TestInput.get_or_create(
        db_session,
        path=test_input_path,
        database=data['database'],
    )

    # We save the basic information about our result
    batch = ci_commit.get_or_create_batch(data['batch_label'])
    if not batch.data:
        batch.data = {}
    batch.data.update({"type": data['job_type']})
    if data.get('input_metadata'):
        test_input.data['metadata'] = data['input_metadata']
        flag_modified(test_input, "data")

    platform = data['platform']
    # for backward-compat with old clients
    if platform == 'lsf':
        platform = 'linux'

    configurations = deserialize_config(
        data['configuration']
    ) if 'configuration' in data else data['configurations']
    output = Output.get_or_create(
        db_session,
        batch=batch,
        platform=platform,
        configurations=configurations,
        extra_parameters=data['extra_parameters'],
        test_input=test_input,
    )
    output.output_type = data.get('input_type', '')

    output.data = data.get('data', {})
    # we can only trust CI outputs to run on the exact code from the commit
    output.data["ci"] = data['job_type'] == 'ci'
    if output.deleted:
        output.deleted = False

    # prefix_output_dir for backward-compatibility
    ci_commit.commit_dir_override = data.get('artifacts_commit',
                                             data.get('commit_ci_dir'))
    output.output_dir_override = data['output_directory']

    # We update the output's status
    output.is_running = data.get('is_running', False)
    if output.is_running:
        output.is_pending = True
    else:
        output.is_pending = data.get('is_pending', False)

    # We save the output's metrics
    if not output.is_pending:
        metrics = data.get('metrics', {})
        output.metrics = metrics
        output.is_failed = data.get('is_failed',
                                    False) or metrics.get('is_failed')

    db_session.add(ci_commit)
    db_session.add(output)
    db_session.commit()
    return jsonify(output.to_dict())
Example #13
def api_ci_commit(commit_id=None):
  if request.method == 'POST':
    hexsha = request.json.get('commit_sha', request.json['git_commit_sha']) if not commit_id else commit_id
    try:
      commit = CiCommit.get_or_create(
        session=db_session,
        hexsha=hexsha,
        project_id=request.json['project'],
        data=request.json,
      )
    except Exception:
      return f"404 ERROR:\n ({request.json['project']}): There is an issue with your commit id ({request.json['git_commit_sha']})", 404
    if not commit.data:
      commit.data = {}
    # Clients can store any metadata with each commit.
    # We've been using it to store code quality metrics per subproject in our monorepo,
    # Then we use other tools (e.g. metabase) to create dashboards.
    commit_data = request.json.get('data', {})
    commit.data = {**commit.data, **commit_data}
    flag_modified(commit, "data")
    if commit.deleted:
      commit.deleted = False
    db_session.add(commit)
    db_session.commit()
    return jsonify({"status": "OK"})


  project_id = request.args['project']
  if not commit_id:
    commit_id = request.args.get('commit', None)
  if not commit_id:
    try:
      project = Project.query.filter(Project.id==project_id).one()
      default_branch = project.data['qatools_config']['project']['reference_branch']
    except Exception:
      default_branch = 'master'
    branch = request.args.get('branch', default_branch)
    ci_commit = latest_successful_commit(db_session, project_id=project_id, branch=branch, batch_label=request.args.get('batch'))
    if not ci_commit:
      return jsonify({'error': f"Sorry, we can't find any commit with results for project {project_id} on {branch}."}), 404
  else:
    try:
      ci_commit = (db_session
                   .query(CiCommit)
                   .options(
                     joinedload(CiCommit.batches).
                     joinedload(Batch.outputs)
                    )
                   .filter(
                     CiCommit.project_id==project_id,
                     CiCommit.hexsha.startswith(commit_id),
                   )
                   .one()
                  )
    except NoResultFound:
      try:
        # TODO: This is a valid use case for having read-rights to the repo,
        #       we can identify a commit by the tag/branch
        #       To replace this without read rights, we should listen for push events and build a database
        project = Project.query.filter(Project.id==project_id).one()
        try:
          commit = project.repo.commit(commit_id)
        except Exception:
          try:
            commit = project.repo.refs[commit_id].commit
          except Exception:
            commit = project.repo.tags[commit_id].commit
        ci_commit = CiCommit(commit, project=project)
        db_session.add(ci_commit)
        db_session.commit()
      except Exception:
        return jsonify({'error': f'Sorry, we could not find any data on commit {commit_id} in project {project_id}.'}), 404
    except BadName:
      return jsonify({'error': f'Sorry, we could not understand the commit ID {commit_id} for project {project_id}.'}), 404
    except Exception:
      raise  # TODO: return a 500 ({'error': 'Sorry, the request failed.'}) instead of re-raising

  batch = request.args.get('batch', None)
  with_batches = [batch] if batch else None # by default we show all batches
  with_aggregation = json.loads(request.args.get('metrics', '{}'))
  response = make_response(ujson.dumps(ci_commit.to_dict(with_aggregation, with_batches=with_batches, with_outputs=True)))
  response.headers['Content-Type'] = 'application/json'
  return response