Ejemplo n.º 1
0
def add_result_start_time(run_id):
    """Backfill the 'start_time' field on every result belonging to a run.

    Results that only have the legacy 'starttime' key get a 'start_time'
    copied from it; results that already have 'start_time' are left alone.
    """
    client = Redis.from_url(settings["CELERY_BROKER_URL"])
    try:
        # Serialize execution per run so two workers never update it concurrently
        with client.lock(f"update-run-lock-{run_id}",
                         blocking_timeout=LOCK_EXPIRE):
            if mongo.runs.find_one({"_id": ObjectId(run_id)}) is None:
                return
            for record in mongo.results.find({"metadata.run": run_id}):
                record = serialize(record)
                if record.get("start_time"):
                    continue
                # Copy the legacy field (may be absent -> None)
                record["start_time"] = record.get("starttime")
                mongo.results.replace_one({"_id": ObjectId(record["id"])},
                                          record)
    except LockError:
        # Another worker holds the lock; discard rather than clog the queue
        pass
Ejemplo n.º 2
0
def add_import(import_file=None, *args, **kwargs):
    """Imports a JUnit XML file and creates a test run and results from it.

    :param import_file: file to upload
    :type import_file: werkzeug.datastructures.FileStorage

    :rtype: Import
    """
    if not import_file:
        return "Bad request, no file uploaded", 400
    # Record the import first so the file can be tagged with its id
    record = {
        "status": "pending",
        "filename": import_file.filename,
        "format": "",
        "run_id": "",
    }
    mongo.imports.insert_one(record)
    record = serialize(record)
    # Keep the raw upload in GridFS, linked back to the import record
    mongo.import_files.upload_from_stream(
        import_file.filename, import_file.stream, metadata={"importId": record["id"]}
    )
    # Dispatch to the matching background importer based on file extension
    filename = import_file.filename
    if filename.endswith(".xml"):
        run_junit_import.delay(record)
    elif filename.endswith(".tar.gz"):
        run_archive_import.delay(record)
    else:
        return "Unsupported Media Type", 415
    return record, 202
Ejemplo n.º 3
0
def add_run(run=None):
    """Create a new run

    :param body: Run object
    :type body: dict | bytes

    :rtype: Run
    """
    if not connexion.request.is_json:
        return "Bad request, JSON is required", 400
    run_dict = connexion.request.get_json()
    now = datetime.utcnow()
    # Fill in timestamps only when the client did not supply them
    run_dict.setdefault("created", now.isoformat())
    run_dict.setdefault("start_time", now.timestamp())
    if "id" in run_dict:
        run_dict["_id"] = ObjectId(run_dict["id"])
    # Translate a human-readable project name into its stored id
    if (run_dict.get("metadata") or {}).get("project"):
        run_dict["metadata"]["project"] = get_project_id(run_dict["metadata"]["project"])
    mongo.runs.insert_one(run_dict)
    run_dict = serialize(run_dict)
    # Defer the summary roll-up so results have a chance to arrive first
    update_run_task.apply_async((run_dict["id"],), countdown=5)
    return run_dict, 201
Ejemplo n.º 4
0
def get_result_list(filter_=None, page=1, page_size=25):
    """Gets all results

    The `filter` parameter takes a list of filters to apply in the form of:

        {name}{operator}{value}

    where:

      - `name` is any valid column in the database
      - `operator` is one of `=`, `!`, `>`, `<`, `)`, `(`, `~`, `*`
      - `value` is what you want to filter by

    Operators are simple correspondents to MongoDB's query selectors:

      - `=` becomes `$eq`
      - `!` becomes `$ne`
      - `>` becomes `$gt`
      - `<` becomes `$lt`
      - `)` becomes `$gte`
      - `(` becomes `$lte`
      - `~` becomes `$regex`
      - `*` becomes `$in`
      - `@` becomes `$exists`

    Note:

    For the `$exists` operator, "true", "t", "yes", "y" and `1` will all be considered true,
    all other values are considered false.


    Example queries:

        /result?filter=metadata.run=63fe5
        /result?filter=test_id~neg
        /result?filter=result!passed


    :param filter: A list of filters to apply
    :param pageSize: Limit the number of results returned, defaults to 25
    :param page: Offset the results list, defaults to 1

    :rtype: List[Result]
    """
    # Merge all filter strings into a single MongoDB query document
    filters = {}
    if filter_:
        for filter_string in filter_:
            filter_obj = generate_filter_object(filter_string)
            if filter_obj:
                filters.update(filter_obj)
    # Pages are 1-based: page 1 starts at offset 0
    offset = (page - 1) * page_size
    # count_documents() replaces Collection.count(), which was deprecated
    # and removed in PyMongo 4
    total_items = mongo.results.count_documents(filters)
    # Ceiling division without math.ceil
    total_pages = -(-total_items // page_size)
    results = mongo.results.find(
        filters, skip=offset, limit=page_size, sort=[("start_time", DESCENDING)]
    )
    return {
        "results": [serialize(result) for result in results],
        "pagination": {
            "page": page,
            "pageSize": page_size,
            "totalItems": total_items,
            "totalPages": total_pages,
        },
    }
Ejemplo n.º 5
0
def run_junit_import(import_):
    """Import a test run and its results from a JUnit XML file.

    :param import_: serialized import record; must contain "id"
    """
    # Update the status of the import
    import_["status"] = "running"
    mongo.imports.replace_one({"_id": ObjectId(import_["id"])}, import_)
    # Fetch the file contents
    try:
        import_file = [f for f in mongo.import_files.find({"metadata.importId": import_["id"]})][0]
    except IndexError:
        # BUG FIX: indexing an empty list raises IndexError, not KeyError --
        # the old handler never fired, so a missing file crashed the task
        # instead of marking the import as errored
        import_["status"] = "error"
        mongo.imports.replace_one({"_id": ObjectId(import_["id"])}, import_)
        return
    # Parse the XML and create a run object(s)
    tree = objectify.parse(import_file)
    root = tree.getroot()
    import_["run_id"] = []
    for testsuite in root.testsuite:
        suite_start = parser.parse(testsuite.get("timestamp"))
        run_dict = {
            "created": datetime.fromtimestamp(time.time()).isoformat(),
            # str(int(timestamp())) is the portable equivalent of
            # strftime("%s"), which is a glibc extension (fails on Windows)
            "start_time": str(int(suite_start.timestamp())),
            "duration": testsuite.get("time"),
            "summary": {
                "errors": testsuite.get("errors"),
                "failures": testsuite.get("failures"),
                "skips": testsuite.get("skipped"),
                "tests": testsuite.get("tests"),
            },
        }
        # Insert the run, and then update the import with the run id
        mongo.runs.insert_one(run_dict)
        run_dict = serialize(run_dict)
        import_["run_id"].append(run_dict["id"])
        mongo.imports.replace_one({"_id": ObjectId(import_["id"])}, import_)
        # Import the contents of the XML file
        for testcase in testsuite.testcase:
            test_name = testcase.get("name").split(".")[-1]
            if testcase.get("classname"):
                test_name = testcase.get("classname").split(".")[-1] + "." + test_name
            result_dict = {
                "test_id": test_name,
                "start_time": run_dict["start_time"],
                "duration": float(testcase.get("time")),
                "metadata": {
                    "run": run_dict["id"],
                    "fspath": testcase.get("file"),
                    "line": testcase.get("line"),
                },
                "params": {},
                "source": testsuite.get("name"),
            }
            skip_reason, traceback = None, None
            # BUG FIX: lxml elements without children evaluate as falsy, so
            # "if testcase.find(...)" mis-detected text-only <failure>/<error>/
            # <skipped> elements -- compare against None explicitly
            if testcase.find("failure") is not None:
                result_dict["result"] = "failed"
                traceback = bytes(str(testcase.failure), "utf8")
            elif testcase.find("error") is not None:
                result_dict["result"] = "error"
                traceback = bytes(str(testcase.error), "utf8")
            elif testcase.find("skipped") is not None:
                result_dict["result"] = "skipped"
                skip_reason = str(testcase.skipped)
            else:
                result_dict["result"] = "passed"

            if skip_reason:
                result_dict["metadata"]["skip_reason"] = skip_reason

            rec = mongo.results.insert_one(result_dict)

            # Store captured tracebacks and console output as GridFS artifacts
            if traceback:
                mongo.fs.upload_from_stream(
                    "traceback.log",
                    traceback,
                    metadata={"contentType": "text/plain", "resultId": str(rec.inserted_id)},
                )

            if testcase.find("system-out") is not None:
                system_out = bytes(str(testcase["system-out"]), "utf8")
                mongo.fs.upload_from_stream(
                    "system-out.log",
                    system_out,
                    metadata={"contentType": "text/plain", "resultId": str(rec.inserted_id)},
                )
            if testcase.find("system-err") is not None:
                system_err = bytes(str(testcase["system-err"]), "utf8")
                mongo.fs.upload_from_stream(
                    "system-err.log",
                    system_err,
                    metadata={"contentType": "text/plain", "resultId": str(rec.inserted_id)},
                )
    import_["status"] = "done"
    mongo.imports.replace_one({"_id": ObjectId(import_["id"])}, import_)
Ejemplo n.º 6
0
def run_archive_import(import_):
    """Import a test run from an Ibutsu archive file"""
    # Update the status of the import
    import_["status"] = "running"
    mongo.imports.replace_one({"_id": ObjectId(import_["id"])}, import_)
    # Fetch the file contents
    try:
        import_file = [f for f in mongo.import_files.find({"metadata.importId": import_["id"]})][0]
    except IndexError:
        # BUG FIX: indexing an empty list raises IndexError, not KeyError --
        # the old handler never fired on a missing file
        import_["status"] = "error"
        mongo.imports.replace_one({"_id": ObjectId(import_["id"])}, import_)
        return

    # First open the tarball and pull in the results
    run = None
    run_dict = None
    results = []
    result_artifacts = {}
    current_dir = None
    result = None
    artifacts = []
    start_time = None
    with tarfile.open(mode="r:gz", fileobj=import_file) as tar:
        # run through the files and dirs, skipping the first one as it is the base directory
        for member in tar.getmembers()[1:]:
            if member.isdir() and member.name != current_dir:
                # Entering a new result directory: flush the previous result
                if result:
                    results.append(result)
                    result_artifacts[result["id"]] = artifacts
                artifacts = []
                result = None
            elif member.name.endswith("result.json"):
                result = json.loads(tar.extractfile(member).read())
                # Track the earliest result start time as a fallback for the run
                result_start_time = result.get("start_time", result.get("starttime"))
                if not start_time or start_time > result_start_time:
                    start_time = result_start_time
            elif member.name.endswith("run.json"):
                run = json.loads(tar.extractfile(member).read())
            elif member.isfile():
                artifacts.append(member)
        # Flush the final result (no trailing directory entry follows it)
        if result:
            results.append(result)
            result_artifacts[result["id"]] = artifacts
        if run:
            run_dict = run
        else:
            run_dict = {
                "duration": 0,
                "summary": {"errors": 0, "failures": 0, "skips": 0, "tests": 0},
            }
        # patch things up a bit, if necessary
        if run_dict.get("start_time") and not run_dict.get("created"):
            run_dict["created"] = run_dict["start_time"]
        elif run_dict.get("created") and not run_dict.get("start_time"):
            run_dict["start_time"] = run_dict["created"]
        elif not run_dict.get("created") and not run_dict.get("start_time"):
            run_dict["created"] = start_time
            run_dict["start_time"] = start_time
        if run_dict.get("metadata", {}).get("project"):
            run_dict["metadata"]["project"] = get_project_id(run_dict["metadata"]["project"])
        # If this run has a valid ObjectId, check if this run exists
        run_exists = False
        if run_dict.get("id") and ObjectId.is_valid(run_dict["id"]):
            # Just check if this exists first
            run_exists = mongo.runs.find_one({"_id": ObjectId(run_dict["id"])}) is not None
        if run_exists:
            # BUG FIX: was mongo.run_dicts.replace_one -- the existence check
            # above reads mongo.runs, so the update must write there too
            mongo.runs.replace_one({"_id": ObjectId(run_dict["id"])}, run_dict)
        else:
            if run_dict.get("id"):
                del run_dict["id"]
            mongo.runs.insert_one(run_dict)
        run_dict = serialize(run_dict)
        import_["run_id"] = run_dict["id"]
        # Now loop through all the results, and create or update them
        for result in results:
            artifacts = result_artifacts.get(result["id"], [])
            _create_result(tar, run_dict["id"], result, artifacts)
    # Update the import record
    import_["status"] = "done"
    mongo.imports.replace_one({"_id": ObjectId(import_["id"])}, import_)
    if run_dict:
        update_run.delay(run_dict["id"])