Example 1
    def save_check_stub(
        self,
        job_id: str,
        report_name: str,
        report_title: Optional[str] = "",
        job_start_time: Optional[datetime.datetime] = None,
        status: JobStatus = JobStatus.PENDING,
        overrides: Optional[Dict] = None,
        mailto: str = "",
        generate_pdf_output: bool = True,
    ) -> None:
        """Call this when we are just starting a check. Saves a "pending" job into storage."""
        job_start_time = job_start_time or datetime.datetime.now()
        report_title = report_title or report_name
        pending_result = NotebookResultPending(
            job_id=job_id,
            status=status,
            report_title=report_title,
            job_start_time=job_start_time,
            report_name=report_name,
            mailto=mailto,
            generate_pdf_output=generate_pdf_output,
            overrides=overrides or {},
        )
        self._save_to_db(pending_result)
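
Note: these excerpts omit their import block. A minimal sketch of the standard-library and third-party imports they appear to rely on follows; the project-specific names (JobStatus, NotebookResultPending, NotebookResultComplete, NotebookResultError, PyMongoNotebookResultSerializer, Serializer, _report_hunter, get_report_cache, initialise_base_dirs, initialize_serializer_from_config) come from the project's own modules, whose import paths are not shown in these excerpts.

import datetime
import uuid
from typing import Dict, Optional, Union

import freezegun                   # used for the freeze_time blocks in the tests below
from gridfs.errors import NoFile   # raised by GridFS reads in Example 9's read_file helper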
Example 2
def test_report_hunter_with_one(bson_library, mongo_host, test_db_name,
                                test_lib_name):
    serializer = PyMongoNotebookResultSerializer(
        database_name=test_db_name,
        mongo_host=mongo_host,
        result_collection_name=test_lib_name)

    job_id = str(uuid.uuid4())
    report_name = str(uuid.uuid4())
    serializer.save_check_stub(job_id, report_name)
    _report_hunter(
        Serializer.PYMONGO.value,
        mongo_host=mongo_host,
        database_name=test_db_name,
        result_collection_name=test_lib_name,
        run_once=True,
    )
    expected = NotebookResultPending(
        job_id=job_id,
        report_name=report_name,
        report_title=report_name,
        update_time=datetime.datetime(2018, 1, 12),
        job_start_time=datetime.datetime(2018, 1, 12),
    )
    assert get_report_cache(report_name, job_id) == expected
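
The expected update_time and job_start_time above are hard-coded to 2018-01-12 even though no freeze_time block appears in the test body, which suggests the clock is frozen by a decorator or fixture outside this excerpt. A minimal sketch of such an autouse fixture, assuming pytest and freezegun (the fixture name frozen_clock is hypothetical):

import datetime

import freezegun
import pytest


@pytest.fixture(autouse=True)
def frozen_clock():
    # Hypothetical fixture: pin "now" so the hard-coded 2018-01-12 timestamps
    # asserted above are deterministic.
    with freezegun.freeze_time(datetime.datetime(2018, 1, 12)):
        yield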
Example 3
def test_report_hunter_timeout(bson_library, mongo_host, status, time_later,
                               should_timeout, test_db_name, test_lib_name):
    job_id = str(uuid.uuid4())
    report_name = str(uuid.uuid4())

    serializer = PyMongoNotebookResultSerializer(
        database_name=test_db_name,
        mongo_host=mongo_host,
        result_collection_name=test_lib_name)
    start_time = time_now = datetime.datetime(2018, 1, 12, 2, 30)
    with freezegun.freeze_time(time_now):
        serializer.save_check_stub(job_id, report_name, status=status)
        _report_hunter(
            Serializer.PYMONGO.value,
            mongo_host=mongo_host,
            database_name=test_db_name,
            result_collection_name=test_lib_name,
            run_once=True,
        )
        expected = NotebookResultPending(
            job_id=job_id,
            report_name=report_name,
            report_title=report_name,
            status=status,
            update_time=time_now,
            job_start_time=start_time,
        )
        assert get_report_cache(report_name, job_id) == expected

    time_now += time_later
    with freezegun.freeze_time(time_now):
        _report_hunter(
            Serializer.PYMONGO.value,
            mongo_host=mongo_host,
            database_name=test_db_name,
            result_collection_name=test_lib_name,
            run_once=True,
        )

        if should_timeout:
            mins = (time_later.total_seconds() / 60) - 1
            expected = NotebookResultError(
                job_id=job_id,
                report_name=report_name,
                report_title=report_name,
                status=JobStatus.TIMEOUT,
                update_time=time_now,
                job_start_time=start_time,
                error_info=
                "This request timed out while being submitted to run. "
                "Please try again! "
                "Timed out after {:.0f} minutes 0 seconds.".format(mins),
            )
        else:
            # expected does not change
            pass
        assert get_report_cache(report_name, job_id) == expected
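
This test receives status, time_later and should_timeout as arguments, so it is evidently parametrised; the decorator is not part of this excerpt. A sketch of what such a parametrisation could look like, assuming pytest and with JobStatus imported as in the note after Example 1 — the timedeltas and True/False outcomes are purely illustrative, not the project's actual timeout thresholds:

import datetime

import pytest


@pytest.mark.parametrize(
    ("status", "time_later", "should_timeout"),
    [
        # Illustrative cases only; the real thresholds live in the test module.
        (JobStatus.SUBMITTED, datetime.timedelta(minutes=1), False),
        (JobStatus.SUBMITTED, datetime.timedelta(hours=2), True),
        (JobStatus.PENDING, datetime.timedelta(minutes=1), False),
        (JobStatus.PENDING, datetime.timedelta(hours=2), True),
    ],
)
def test_report_hunter_timeout(bson_library, mongo_host, status, time_later,
                               should_timeout, test_db_name, test_lib_name):
    ...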
Example 4
def test_report_hunter_with_status_change(bson_library, mongo_host,
                                          test_db_name, test_lib_name):
    initialise_base_dirs()
    serializer = PyMongoNotebookResultSerializer(
        database_name=test_db_name,
        mongo_host=mongo_host,
        result_collection_name=test_lib_name)

    job_id = str(uuid.uuid4())
    report_name = str(uuid.uuid4())
    with freezegun.freeze_time(datetime.datetime(2018, 1, 12, 2, 30)):
        serializer.save_check_stub(job_id, report_name)
        _report_hunter(
            Serializer.PYMONGO.value,
            mongo_host=mongo_host,
            database_name=test_db_name,
            result_collection_name=test_lib_name,
            run_once=True,
        )
        expected = NotebookResultPending(
            job_id=job_id,
            report_name=report_name,
            report_title=report_name,
            update_time=datetime.datetime(2018, 1, 12, 2, 30),
            job_start_time=datetime.datetime(2018, 1, 12, 2, 30),
        )
        assert get_report_cache(report_name, job_id) == expected

    with freezegun.freeze_time(datetime.datetime(2018, 1, 12, 2, 32)):
        serializer.update_check_status(job_id,
                                       JobStatus.CANCELLED,
                                       error_info="This was cancelled!")
        _report_hunter(
            Serializer.PYMONGO.value,
            mongo_host=mongo_host,
            database_name=test_db_name,
            result_collection_name=test_lib_name,
            run_once=True,
        )

        expected = NotebookResultError(
            job_id=job_id,
            report_name=report_name,
            report_title=report_name,
            status=JobStatus.CANCELLED,
            update_time=datetime.datetime(2018, 1, 12, 2, 32),
            job_start_time=datetime.datetime(2018, 1, 12, 2, 30),
            error_info="This was cancelled!",
        )
        assert get_report_cache(report_name, job_id) == expected
Example 5
def test_report_hunter_timeout(bson_library, status, time_later,
                               should_timeout, webapp_config):
    job_id = str(uuid.uuid4())
    report_name = str(uuid.uuid4())

    serializer = initialize_serializer_from_config(webapp_config)
    start_time = time_now = datetime.datetime(2018, 1, 12, 2, 30)
    with freezegun.freeze_time(time_now):
        serializer.save_check_stub(job_id, report_name, status=status)
        _report_hunter(webapp_config=webapp_config, run_once=True)
        expected = NotebookResultPending(
            job_id=job_id,
            report_name=report_name,
            report_title=report_name,
            status=status,
            update_time=time_now,
            job_start_time=start_time,
        )
        assert get_report_cache(report_name,
                                job_id,
                                cache_dir=webapp_config.CACHE_DIR) == expected

    time_now += time_later
    with freezegun.freeze_time(time_now):
        _report_hunter(webapp_config=webapp_config, run_once=True)

        if should_timeout:
            mins = (time_later.total_seconds() / 60) - 1
            expected = NotebookResultError(
                job_id=job_id,
                report_name=report_name,
                report_title=report_name,
                status=JobStatus.TIMEOUT,
                update_time=time_now,
                job_start_time=start_time,
                error_info=
                "This request timed out while being submitted to run. "
                "Please try again! "
                "Timed out after {:.0f} minutes 0 seconds.".format(mins),
            )
        else:
            # expected does not change
            pass
        assert get_report_cache(report_name,
                                job_id,
                                cache_dir=webapp_config.CACHE_DIR) == expected
Example 6
def test_report_hunter_with_one(bson_library, webapp_config):
    serializer = initialize_serializer_from_config(webapp_config)

    job_id = str(uuid.uuid4())
    report_name = str(uuid.uuid4())
    serializer.save_check_stub(job_id, report_name)
    _report_hunter(webapp_config=webapp_config, run_once=True)
    expected = NotebookResultPending(
        job_id=job_id,
        report_name=report_name,
        report_title=report_name,
        update_time=datetime.datetime(2018, 1, 12),
        job_start_time=datetime.datetime(2018, 1, 12),
    )
    assert get_report_cache(report_name,
                            job_id,
                            cache_dir=webapp_config.CACHE_DIR) == expected
Example 7
def test_report_hunter_with_status_change(bson_library, webapp_config):
    initialise_base_dirs(webapp_config=webapp_config)
    serializer = initialize_serializer_from_config(webapp_config)

    job_id = str(uuid.uuid4())
    report_name = str(uuid.uuid4())
    with freezegun.freeze_time(datetime.datetime(2018, 1, 12, 2, 30)):
        serializer.save_check_stub(job_id, report_name)
        _report_hunter(webapp_config=webapp_config, run_once=True)
        expected = NotebookResultPending(
            job_id=job_id,
            report_name=report_name,
            report_title=report_name,
            update_time=datetime.datetime(2018, 1, 12, 2, 30),
            job_start_time=datetime.datetime(2018, 1, 12, 2, 30),
        )
        assert get_report_cache(report_name,
                                job_id,
                                cache_dir=webapp_config.CACHE_DIR) == expected

    with freezegun.freeze_time(datetime.datetime(2018, 1, 12, 2, 32)):
        serializer.update_check_status(job_id,
                                       JobStatus.CANCELLED,
                                       error_info="This was cancelled!")
        _report_hunter(webapp_config=webapp_config, run_once=True)

        expected = NotebookResultError(
            job_id=job_id,
            report_name=report_name,
            report_title=report_name,
            status=JobStatus.CANCELLED,
            update_time=datetime.datetime(2018, 1, 12, 2, 32),
            job_start_time=datetime.datetime(2018, 1, 12, 2, 30),
            error_info="This was cancelled!",
        )
        assert get_report_cache(report_name,
                                job_id,
                                cache_dir=webapp_config.CACHE_DIR) == expected
Example 8
def test_report_hunter_pending_to_done(bson_library, mongo_host, test_db_name,
                                       test_lib_name):
    job_id = str(uuid.uuid4())
    report_name = str(uuid.uuid4())
    serializer = PyMongoNotebookResultSerializer(
        database_name=test_db_name,
        mongo_host=mongo_host,
        result_collection_name=test_lib_name)

    with freezegun.freeze_time(datetime.datetime(2018, 1, 12, 2, 30)):
        serializer.save_check_stub(job_id,
                                   report_name,
                                   status=JobStatus.SUBMITTED)
        _report_hunter(
            Serializer.PYMONGO.value,
            mongo_host=mongo_host,
            database_name=test_db_name,
            result_collection_name=test_lib_name,
            run_once=True,
        )
        expected = NotebookResultPending(
            job_id=job_id,
            report_name=report_name,
            report_title=report_name,
            status=JobStatus.SUBMITTED,
            update_time=datetime.datetime(2018, 1, 12, 2, 30),
            job_start_time=datetime.datetime(2018, 1, 12, 2, 30),
        )
        assert get_report_cache(report_name, job_id) == expected

    with freezegun.freeze_time(datetime.datetime(2018, 1, 12, 2, 32)):
        serializer.update_check_status(job_id, JobStatus.PENDING)
        _report_hunter(
            Serializer.PYMONGO.value,
            mongo_host=mongo_host,
            database_name=test_db_name,
            result_collection_name=test_lib_name,
            run_once=True,
        )

        expected = NotebookResultPending(
            job_id=job_id,
            report_name=report_name,
            report_title=report_name,
            status=JobStatus.PENDING,
            update_time=datetime.datetime(2018, 1, 12, 2, 32),
            job_start_time=datetime.datetime(2018, 1, 12, 2, 30),
        )
        assert get_report_cache(report_name, job_id) == expected

    with freezegun.freeze_time(datetime.datetime(2018, 1, 12, 2, 37)):
        serializer.update_check_status(
            job_id,
            JobStatus.DONE,
            raw_html_resources={"outputs": {}},
            job_finish_time=datetime.datetime.now(),
            pdf="",
            raw_ipynb_json="[]",
            raw_html="",
        )
        _report_hunter(
            Serializer.PYMONGO.value,
            mongo_host=mongo_host,
            database_name=test_db_name,
            result_collection_name=test_lib_name,
            run_once=True,
        )

        expected = NotebookResultComplete(
            job_id=job_id,
            report_name=report_name,
            report_title=report_name,
            status=JobStatus.DONE,
            update_time=datetime.datetime(2018, 1, 12, 2, 37),
            job_start_time=datetime.datetime(2018, 1, 12, 2, 30),
            job_finish_time=datetime.datetime(2018, 1, 12, 2, 37),
            raw_html="",
            raw_html_resources={"outputs": {}},
            raw_ipynb_json="[]",
        )
        assert get_report_cache(report_name, job_id) == expected
Example 9
    def _convert_result(
        self,
        result: Dict,
        load_payload: bool = True
    ) -> Union[NotebookResultError, NotebookResultComplete,
               NotebookResultPending, None]:
        """Convert a raw result document into the matching NotebookResult* object.

        Returns None for empty, unrecognised or deleted results. When load_payload
        is True, completed results also have their stored HTML/PDF payloads read
        back from the result data store."""
        if not result:
            return None

        status = result.get("status", "")
        job_status = JobStatus.from_string(status)
        if job_status is None:
            return None
        cls = {
            JobStatus.CANCELLED: NotebookResultError,
            JobStatus.DONE: NotebookResultComplete,
            JobStatus.PENDING: NotebookResultPending,
            JobStatus.ERROR: NotebookResultError,
            JobStatus.SUBMITTED: NotebookResultPending,
            JobStatus.TIMEOUT: NotebookResultError,
            JobStatus.DELETED: None,
        }.get(job_status)
        if cls is None:
            return None

        if load_payload and job_status == JobStatus.DONE:

            def read_file(path):
                try:
                    return self.result_data_store.get_last_version(path).read()
                except NoFile:
                    logger.error("Could not find file %s in %s", path,
                                 self.result_data_store)
                    return ""

            outputs = {
                path: read_file(path)
                for path in result.get("raw_html_resources", {}).get(
                    "outputs", [])
            }
            result["raw_html_resources"]["outputs"] = outputs
            if result.get("generate_pdf_output"):
                pdf_filename = _pdf_filename(result["job_id"])
                result["pdf"] = read_file(pdf_filename)

        if cls == NotebookResultComplete:
            return NotebookResultComplete(
                job_id=result["job_id"],
                job_start_time=result["job_start_time"],
                report_name=result["report_name"],
                status=job_status,
                update_time=result["update_time"],
                job_finish_time=result["job_finish_time"],
                raw_html_resources=result.get("raw_html_resources", {}),
                raw_ipynb_json=result.get("raw_ipynb_json"),
                raw_html=result.get("raw_html"),
                pdf=result.get("pdf", ""),
                overrides=result.get("overrides", {}),
                generate_pdf_output=result.get("generate_pdf_output", True),
                report_title=result.get("report_title", result["report_name"]),
                mailto=result.get("mailto", ""),
                stdout=result.get("stdout", []),
            )
        elif cls == NotebookResultPending:
            return NotebookResultPending(
                job_id=result["job_id"],
                job_start_time=result["job_start_time"],
                report_name=result["report_name"],
                status=job_status,
                update_time=result["update_time"],
                overrides=result.get("overrides", {}),
                generate_pdf_output=result.get("generate_pdf_output", True),
                report_title=result.get("report_title", result["report_name"]),
                mailto=result.get("mailto", ""),
                stdout=result.get("stdout", []),
            )

        elif cls == NotebookResultError:
            return NotebookResultError(
                job_id=result["job_id"],
                job_start_time=result["job_start_time"],
                report_name=result["report_name"],
                status=job_status,
                update_time=result["update_time"],
                error_info=result["error_info"],
                overrides=result.get("overrides", {}),
                generate_pdf_output=result.get("generate_pdf_output", True),
                report_title=result.get("report_title", result["report_name"]),
                mailto=result.get("mailto", ""),
                stdout=result.get("stdout", []),
            )
        else:
            raise ValueError(
                "Could not deserialise {} into result object.".format(result))
Example 10
def test_report_hunter_pending_to_done(bson_library, webapp_config):
    job_id = str(uuid.uuid4())
    report_name = str(uuid.uuid4())
    serializer = initialize_serializer_from_config(webapp_config)

    with freezegun.freeze_time(datetime.datetime(2018, 1, 12, 2, 30)):
        serializer.save_check_stub(job_id,
                                   report_name,
                                   status=JobStatus.SUBMITTED)
        _report_hunter(webapp_config=webapp_config, run_once=True)
        expected = NotebookResultPending(
            job_id=job_id,
            report_name=report_name,
            report_title=report_name,
            status=JobStatus.SUBMITTED,
            update_time=datetime.datetime(2018, 1, 12, 2, 30),
            job_start_time=datetime.datetime(2018, 1, 12, 2, 30),
        )
        assert get_report_cache(report_name,
                                job_id,
                                cache_dir=webapp_config.CACHE_DIR) == expected

    with freezegun.freeze_time(datetime.datetime(2018, 1, 12, 2, 32)):
        serializer.update_check_status(job_id, JobStatus.PENDING)
        _report_hunter(webapp_config=webapp_config, run_once=True)

        expected = NotebookResultPending(
            job_id=job_id,
            report_name=report_name,
            report_title=report_name,
            status=JobStatus.PENDING,
            update_time=datetime.datetime(2018, 1, 12, 2, 32),
            job_start_time=datetime.datetime(2018, 1, 12, 2, 30),
        )
        assert get_report_cache(report_name,
                                job_id,
                                cache_dir=webapp_config.CACHE_DIR) == expected

    with freezegun.freeze_time(datetime.datetime(2018, 1, 12, 2, 37)):
        serializer.update_check_status(
            job_id,
            JobStatus.DONE,
            raw_html_resources={"outputs": {}},
            job_finish_time=datetime.datetime.now(),
            pdf="",
            raw_ipynb_json="[]",
            raw_html="",
        )
        _report_hunter(webapp_config=webapp_config, run_once=True)

        expected = NotebookResultComplete(
            job_id=job_id,
            report_name=report_name,
            report_title=report_name,
            status=JobStatus.DONE,
            update_time=datetime.datetime(2018, 1, 12, 2, 37),
            job_start_time=datetime.datetime(2018, 1, 12, 2, 30),
            job_finish_time=datetime.datetime(2018, 1, 12, 2, 37),
            raw_html="",
            raw_html_resources={"outputs": {}},
            raw_ipynb_json="[]",
        )
        assert get_report_cache(report_name,
                                job_id,
                                cache_dir=webapp_config.CACHE_DIR) == expected