Example #1
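A zest suite for log.input_request: log._interactive_emit_line is stack-mocked for the whole suite, while log.is_headless and log._input are mocked within the individual tests to cover both interactive and headless behavior.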
def zest_input_request():
    zest.stack_mock(log._interactive_emit_line)

    def it_calls_input_when_not_headless():
        with zest.mock(log.is_headless, returns=False):
            with zest.mock(log._input, returns="ret test 1") as m_input:
                ret = log.input_request("test1", "when headless")
                assert m_input.called_once()
                assert ret == "ret test 1"

    def it_handles_headless_mode():
        m_input = zest.stack_mock(log._input)
        zest.stack_mock(log.is_headless, returns=True)

        def it_does_not_call_input_when_headless():
            ret = log.input_request("test2", "when headless")
            assert m_input.not_called()
            assert ret == "when headless"

        def it_raises_when_headless_and_theres_an_exception_passed():
            ret = log.input_request("test2", "when headless")
            assert m_input.not_called()
            assert ret == "when headless"

        zest()

    zest()
Example #2
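A nested group that stack-mocks PipelineTask._parent_timestamps and PipelineTask._child_timestamps so PipelineTask._out_of_date can be tested against controlled timestamp lists.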
    def out_of_date():
        m_parent_timestamps = zest.stack_mock(PipelineTask._parent_timestamps)
        m_child_timestamps = zest.stack_mock(PipelineTask._child_timestamps)

        def it_returns_false_on_no_parent_files():
            m_parent_timestamps.returns([])
            assert PipelineTask._out_of_date("parent", "child")[0] is False

        def it_returns_true_on_no_child_files():
            m_parent_timestamps.returns([("a0", 1)])
            m_child_timestamps.returns([])
            assert PipelineTask._out_of_date("parent", "child")[0] is True

        def it_returns_true_if_any_parent_file_is_younger_than_youngest_child(
        ):
            m_parent_timestamps.returns([("p0", 3)])
            m_child_timestamps.returns([("c0", 1), ("c1", 2)])
            assert PipelineTask._out_of_date("parent", "child")[0] is True

        def it_returns_false_if_all_parent_files_are_older_than_all_child_files(
        ):
            m_parent_timestamps.returns([("p0", 1)])
            m_child_timestamps.returns([("c0", 2), ("c1", 3)])
            assert PipelineTask._out_of_date("parent", "child")[0] is False

        zest()
Example #3
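The headless sub-group of Example #1 on its own, with the stack-mocked log._input and log.is_headless shared by both child tests.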
    def it_handles_headless_mode():
        m_input = zest.stack_mock(log._input)
        zest.stack_mock(log.is_headless, returns=True)

        def it_does_not_call_input_when_headless():
            ret = log.input_request("test2", "when headless")
            assert m_input.not_called()
            assert ret == "when headless"

        def it_raises_when_headless_and_theres_an_exception_passed():
            ret = log.input_request("test2", "when headless")
            assert m_input.not_called()
            assert ret == "when headless"

        zest()
Example #4
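Tests for imops.dump that stack-mock imops._imstack_write (with reset_before_each=True) and check the destination path as well as coercion of list and 2-D inputs.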
def zest_ImageOps_dump():
    imstack = np.array([[[1, 2], [3, 4]], [[5, 6], [7, 8]]])
    m_imstack_write = zest.stack_mock(imops._imstack_write,
                                      reset_before_each=True)

    def it_dumps_to_the_root_path_if_set():
        imops.dump_set_root_path("/dir")
        imops.dump("name", imstack)
        assert m_imstack_write.called_once_with("/dir", "name.npy", imstack)

    def it_dumps_to_local_path_if_root_not_set():
        imops.dump_set_root_path(None)
        imops.dump("name", imstack)
        assert m_imstack_write.called_once_with(None, "name.npy", imstack)

    def it_converts_lists_to_array_stacks():
        list_stack = [imstack[:, :, 0], imstack[:, :, 1]]
        imops.dump("name", list_stack)

    def it_converts_2d_arrays_to_3d():
        im = imstack[:, :, 0]
        assert im.ndim == 2
        imops.dump("name", im)

    def it_raises_on_malformed_arrays():
        with zest.raises(AssertionError):
            im = imstack[:, 0, 0]
            imops.dump("name", im)

    zest()
Example #5
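Tests of the @check.args decorator; check.log.error is stack-mocked for the whole suite.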
def zest_check_args():
    zest.stack_mock(check.log.error)

    def it_raises_on_bad_argument_global():
        with zest.raises(check.CheckError) as e:
            my_test_global_func(1, 2)

    def it_raises_on_bad_argument_local():
        with zest.raises(check.CheckError) as e:

            @check.args
            def myfunc(a: str, b):
                assert isinstance(a, str)

            myfunc(1, 2)

    zest()
Example #6
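Tests for _do_ptm_permutations on a small peptide DataFrame; prep_worker._info is stack-mocked for the whole suite.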
def zest_do_ptm_permutations():
    df = pd.DataFrame(
        dict(
            pep_i=[
                1,
                1,
                1,
                1,
                1,
                1,
            ],
            aa=list("ABCDEF"),
            pep_offset_in_pro=list(range(6)),
            pro_ptm_locs=["1;4"] * 6,
        ))
    n_ptms_limit = 5

    zest.stack_mock(prep_worker._info)

    def it_adds_permutation_labels():
        new_pep_seqs = _do_ptm_permutations(df, n_ptms_limit)
        assert len(new_pep_seqs) == 3
        # remember the pro_ptm_locs string is 1-based
        assert new_pep_seqs[0].aa.tolist() == ["A[p]", "B", "C", "D", "E", "F"]
        assert new_pep_seqs[1].aa.tolist() == ["A", "B", "C", "D[p]", "E", "F"]
        assert new_pep_seqs[2].aa.tolist() == [
            "A[p]", "B", "C", "D[p]", "E", "F"
        ]
        assert np.all(np.isnan(new_pep_seqs[0].pep_i.values))

    def it_handles_empty_ptm_locations():
        df_empty = df.copy()
        df_empty.pro_ptm_locs = ""
        new_pep_seqs = _do_ptm_permutations(df_empty, n_ptms_limit)
        assert len(new_pep_seqs) == 0

    def it_handles_too_many_permutations():
        # we allow 5 per peptide at present
        df_too_many = df.copy()
        df_too_many.pro_ptm_locs = "1;2;3;4;5;6"
        new_pep_seqs = _do_ptm_permutations(df_too_many, n_ptms_limit)
        assert len(new_pep_seqs) == 0

    zest()
Example #7
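A minimal illustration that a stack_mock is reset between sibling tests, so called_once() holds in both test_0 and test_1.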
        def it_mocks_an_external_symbol_with_resets():
            m_foo = zest.stack_mock(pretend_unit_under_test.foo)

            def test_0():
                pretend_unit_under_test.foo()
                assert m_foo.called_once()

            def test_1():
                pretend_unit_under_test.foo()
                assert m_foo.called_once()

            zest()
Example #8
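A larger suite that stack-mocks Pipeline._gc, Pipeline._p, and each Task's start and is_dirty methods to exercise scheduling: dirty checks, ordering, cleaning, limits, forcing, error propagation, state files, and logging.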
def zest_Pipeline_calls():
    m_gc = zest.stack_mock(Pipeline._gc)
    m_p = zest.stack_mock(Pipeline._p)
    m_task1_start = zest.stack_mock(Task1.start)
    m_task2_start = zest.stack_mock(Task2.start)
    m_task3_start = zest.stack_mock(Task3.start)
    m_task1_is_dirty = zest.stack_mock(Task1.is_dirty)
    m_task2_is_dirty = zest.stack_mock(Task2.is_dirty)
    m_task3_is_dirty = zest.stack_mock(Task3.is_dirty)
    src_dir, dst_dir, tasks = None, None, None

    def _before():
        nonlocal src_dir, dst_dir, tasks
        src_dir = local.path("/tmp/test_pipeline")
        dst_dir = src_dir / "output"
        src_dir.delete()
        dst_dir.delete()
        src_dir.mkdir()

        tasks = dict(
            # (cls, params, kwargs)
            task1=(Task1, dict(inputs=dict(src_dir="")), {}),
            task2=(Task2, dict(inputs=dict(task1="../task1")), {}),
            task3=(Task3, dict(inputs=dict(task1="../task1", task2="../task2")), {}),
        )

    def _after():
        src_dir.delete()
        dst_dir.delete()

    def _set_dirty(dirty1, dirty2, dirty3):
        m_task1_is_dirty.returns(dirty1)
        m_task2_is_dirty.returns(dirty2)
        m_task3_is_dirty.returns(dirty3)

    def _p(**kwargs):
        return Pipeline(src_dir, dst_dir, tasks, **kwargs)

    def it_creates_dst_dirs():
        _set_dirty(True, True, True)
        _p()
        assert dst_dir.exists()
        assert (dst_dir / "task1").exists()
        assert (dst_dir / "task2").exists()
        assert (dst_dir / "task3").exists()

    def it_runs_all_targets_when_dirty():
        _set_dirty(True, True, True)
        _p()
        assert m_task1_start.called_once()
        assert m_task2_start.called_once()
        assert m_task3_start.called_once()

    def it_cleans_output_folders():
        _set_dirty(True, True, True)
        stuff = dst_dir / "task1"
        stuff.mkdir()
        stuff /= "stuff"
        stuff.touch()
        _p(clean=True)
        assert not stuff.exists()

    def it_cleans_and_early_outs():
        _set_dirty(True, True, True)
        _p(clean=True)
        assert not m_task1_start.called()
        assert not m_task2_start.called()
        assert not m_task3_start.called()

    def it_calls_in_order():
        time1 = None

        def _start_task1(*args, **kwargs):
            nonlocal time1
            time1 = time.time()

        time2 = None

        def _start_task2(*args, **kwargs):
            nonlocal time2
            time2 = time.time()

        time3 = None

        def _start_task3(*args, **kwargs):
            nonlocal time3
            time3 = time.time()

        m_task1_start.hook(_start_task1)
        m_task2_start.hook(_start_task2)
        m_task3_start.hook(_start_task3)
        _set_dirty(True, True, True)
        _p()
        assert time1 < time2 and time1 < time3 and time2 < time3
        m_task1_start.hook(None)
        m_task2_start.hook(None)
        m_task3_start.hook(None)

    def it_skips_task1_when_already_done():
        _set_dirty(False, True, False)
        _p()
        assert not m_task1_start.called()
        assert m_task2_start.called()
        assert not m_task3_start.called()
        # Note that the mocked task2 doesn't actually dirty anything, so task3
        # will not run since its is_dirty mock is set to False

    def it_runs_only_on_limited_targets():
        _set_dirty(True, True, True)
        _p(limit=["task2"])
        assert not m_task1_start.called()
        assert m_task2_start.called_once()
        assert not m_task3_start.called()

    def it_traps_exceptions_in_tasks():
        with zest.mock(Task1.error) as m_error:
            _set_dirty(True, False, False)
            e = Exception("problem")
            m_task1_start.exceptions(e)
            _p()
            assert m_error.called_once_with_kws(e=e)
            m_task1_start.exceptions(None)

    def it_does_not_run_if_upstream_error():
        _set_dirty(True, True, True)
        e = Exception("problem")
        m_task1_start.exceptions(e)
        _p()
        assert m_task1_start.called_once()
        assert not m_task2_start.called()
        assert not m_task3_start.called()
        assert (dst_dir / "task1" / PipelineState.error).exists()
        m_task1_start.exceptions(None)

    def it_forces_a_run_even_if_not_dirty():
        _set_dirty(False, False, False)
        _p(force=True)
        assert m_task1_start.called_once()
        assert m_task2_start.called_once()
        assert m_task3_start.called_once()

    def it_does_not_force_a_run_if_ignored():
        _set_dirty(True, False, False)
        _p(limit=["task2"], force=True)
        assert not m_task1_start.called()
        assert m_task2_start.called_once()
        assert not m_task3_start.called()

    def it_does_not_force_a_run_if_upstream_have_errors():
        _set_dirty(True, True, True)
        e = Exception("problem")
        m_task1_start.exceptions(e)
        _p(force=True)
        assert m_task1_start.called_once()
        assert not m_task2_start.called()
        assert not m_task3_start.called()
        m_task1_start.exceptions(None)

    def it_reruns_on_a_previous_failure():
        with zest.mock(
            Task1.get_output_state, returns=PipelineState.error
        ) as m_get_output_state:
            _set_dirty(True, False, False)
            _p()
            assert m_task1_start.called_once()
            assert not m_task2_start.called()
            assert not m_task3_start.called()

    def it_does_not_rerun_a_previous_success_if_no_upstream_changes():
        _set_dirty(False, False, False)
        _p()
        assert not m_task1_start.called()
        assert not m_task2_start.called()
        assert not m_task3_start.called()

    def it_does_run_on_upstream_change_even_if_previous_success():
        with zest.mock(
            Task1.get_output_state, returns=PipelineState.success
        ) as m_get_output_state:
            _set_dirty(True, True, True)
            Pipeline(src_dir, dst_dir, tasks)
            assert m_task1_start.called_once()
            assert m_task2_start.called_once()
            assert m_task3_start.called_once()

    def it_does_rerun_on_a_previous_failure():
        task1 = dst_dir / "task1"
        task1.mkdir()
        (task1 / PipelineState.error).touch()
        _set_dirty(True, True, True)
        Pipeline(src_dir, dst_dir, tasks)
        assert m_task1_start.called_once()
        assert m_task2_start.called_once()
        assert m_task3_start.called_once()

    def it_calls_the_task_success():
        with zest.mock(Task1.success) as m_success:
            _set_dirty(True, False, False)
            _p()
            assert m_success.called_once()

    def it_sets_uptodate():
        _set_dirty(False, False, False)
        _p()
        for task in ("task1", "task2", "task3"):
            assert (dst_dir / task / PipelineState.uptodate).exists()

    def it_clears_old_state():
        _set_dirty(True, False, False)
        e = Exception("problem")
        m_task1_start.exceptions(e)
        _p()
        assert m_task1_start.called_once()
        assert (dst_dir / "task1" / PipelineState.error).exists()
        m_task1_start.exceptions(None)

        # Let the second call pass
        m_task1_start.reset()
        _p()
        assert not (dst_dir / "task1" / PipelineState.error).exists()
        assert (dst_dir / "task1" / PipelineState.success).exists()

    def it_logs_state_changes():
        _set_dirty(True, False, False)
        p = _p()
        assert len(p._logs) == 4
        assert p._logs[0][1:3] == ("task1", "started")
        assert p._logs[1][1:3] == ("task1", "success")
        assert p._logs[2][1:3] == ("task2", "uptodate")
        assert p._logs[3][1:3] == ("task3", "uptodate")
        assert p.failed_count() == 0

    def it_logs_errors():
        _set_dirty(True, False, False)
        e = ValueError("test problem")
        m_task1_start.exceptions(e)
        p = _p()
        assert len(p._logs) == 4
        assert p._logs[0][1:3] == ("task1", "started")
        assert p._logs[1][1:4] == ("task1", "failed", "ValueError: test problem")
        assert p.failed_count() == 1
        m_task1_start.exceptions(None)

    zest()
Example #9
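Tests of the SigprocV1Result DataFrame accessors; _load_field_prop is stack-mocked with a substitute_fn (and reset_before_each=False) so the result object reads the canned props defined above.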
def zest_all_df():
    props = Munch(
        signal_radmat=npf(
            [
                [[3.0, 2.0, 1.0], [1.0, 0.0, 1.0]],
                [[4.0, 2.0, 1.0], [1.0, 1.0, 0.0]],
                [[5.0, 2.0, 1.0], [1.0, 1.0, 1.0]],
                [[6.0, 3.0, 0.9], [1.0, 0.0, 1.0]],
            ]
        ),
        noise_radmat=npf(
            [
                [[0.3, 0.2, 0.1], [0.3, 0.2, 0.1]],
                [[0.4, 0.2, 0.1], [0.3, 0.2, 0.1]],
                [[0.5, 0.2, 0.1], [0.3, 0.2, 0.1]],
                [[0.6, 0.3, 2.9], [0.3, 0.2, 0.1]],
            ]
        ),
        localbg_radmat=npf(
            [
                [[0.0, 0.0, 0.0], [0.0, 0.0, 0.0]],
                [[0.0, 0.0, 0.0], [0.0, 0.0, 0.0]],
                [[0.0, 0.0, 0.0], [0.0, 0.0, 0.0]],
                [[0.0, 0.0, 0.0], [0.0, 0.0, 0.0]],
            ]
        ),
        peak_df=pd.DataFrame(
            [
                (0, 0, 0, 1.0, 11.0),
                (0, 0, 1, 2.0, 12.0),
                (0, 1, 0, 3.0, 13.0),
                (0, 1, 1, 4.0, 14.0),
            ],
            columns=["peak_i", "field_i", "field_peak_i", "aln_x", "aln_y"],
        ),
        field_df=pd.DataFrame(
            [
                (0, 0, 0, 1, 2, 0.1, 100.0, 2, 10, 5, 10, 500, 20, 490),
                (0, 0, 1, 2, 3, 0.2, 110.0, 2, 10, 5, 10, 500, 20, 490),
                (0, 0, 2, 3, 4, 0.3, 120.0, 2, 10, 5, 10, 500, 20, 490),
                (0, 1, 0, 4, 5, 0.4, 130.0, 2, 10, 5, 10, 500, 20, 490),
                (0, 1, 1, 5, 6, 0.5, 140.0, 2, 10, 5, 10, 500, 20, 490),
                (0, 1, 2, 6, 7, 0.6, 150.0, 2, 10, 5, 10, 500, 20, 490),
                (1, 0, 0, 7, 8, 0.7, 160.0, 2, 10, 6, 20, 510, 30, 480),
                (1, 0, 1, 8, 9, 0.8, 170.0, 2, 10, 6, 20, 510, 30, 480),
                (1, 0, 2, 9, 0, 0.9, 180.0, 2, 10, 6, 20, 510, 30, 480),
                (1, 1, 0, 0, 1, 0.0, 190.0, 2, 10, 6, 20, 510, 30, 480),
                (1, 1, 1, 1, 2, 0.1, 200.0, 2, 10, 6, 20, 510, 30, 480),
                (1, 1, 2, 2, 3, 0.2, 210.0, 2, 10, 6, 20, 510, 30, 480),
            ],
            columns=[
                "field_i",
                "channel_i",
                "cycle_i",
                "shift_y",
                "shift_x",
                "aln_score",
                "bg_median",
                "n_mask_rects",
                "mask_area",
                "border_size",
                "aligned_roi_l",
                "aligned_roi_r",
                "aligned_roi_b",
                "aligned_roi_t",
            ],
        ),
        mask_rects_df=pd.DataFrame(
            [
                (0, 0, 0, 1, 2, 3, 4),
                (0, 0, 1, 2, 2, 3, 4),
                (0, 0, 2, 3, 2, 3, 4),
                (0, 1, 0, 4, 2, 3, 4),
                (0, 1, 1, 5, 2, 3, 4),
                (0, 1, 2, 6, 2, 3, 4),
                (1, 0, 0, 7, 2, 3, 4),
                (1, 0, 1, 8, 2, 3, 4),
                (1, 0, 2, 9, 2, 3, 4),
                (1, 1, 0, 0, 2, 3, 4),
                (1, 1, 1, 1, 2, 3, 4),
                (1, 1, 2, 2, 2, 3, 4),
            ],
            columns=["field_i", "channel_i", "cycle_i", "l", "r", "w", "h",],
        ),
    )

    def _mock_load_field_prop(inst, field_i, prop):
        return props[prop]

    zest.stack_mock(
        SigprocV1Result._load_field_prop,
        substitute_fn=_mock_load_field_prop,
        reset_before_each=False,
    )

    res = SigprocV1Result(
        is_loaded_result=True, field_files=[""], n_peaks=4, n_channels=2, n_cycles=3
    )

    def it_np_signal_radmat():
        assert np_array_same(res.signal_radmat(), props.signal_radmat)

    def it_np_noise_radmat():
        assert np_array_same(res.noise_radmat(), props.noise_radmat)

    # All of the following are testing the DataFrames

    def it_fields():
        assert res.fields().equals(props.field_df)

    def it_radmats():
        rad_df = res.radmats()
        check.df_t(rad_df, SigprocV1Result.radmat_df_schema)
        assert len(rad_df) == 4 * 2 * 3

        # Sanity check a few
        assert (
            rad_df[
                (rad_df.peak_i == 1) & (rad_df.channel_i == 1) & (rad_df.cycle_i == 1)
            ].signal.values[0]
            == 1.0
        )
        assert (
            rad_df[
                (rad_df.peak_i == 2) & (rad_df.channel_i == 0) & (rad_df.cycle_i == 0)
            ].signal.values[0]
            == 5.0
        )

    def it_mask_rects():
        rects_df = res.mask_rects()
        check.df_t(rects_df, SigprocV1Result.mask_rects_df_schema)
        assert len(rects_df) == 2 * 2 * 3

    # TASK: Lots of work left here

    # def it_peaks():
    #     assert res.peaks()[["field_i", "field_peak_i", "aln_x", "aln_y"]].equals(
    #         props.peak_df[["field_i", "field_peak_i", "aln_x", "aln_y"]]
    #     )
    #     assert np.all(res.peaks().peak_i.values == np.arange(4))

    # def it_radmats__peaks():
    #     df = res.radmats__peaks()
    #
    #     # Sanity check a few
    #     assert df[(df.peak_i == 1) & (df.channel_i == 0) & (df.cycle_i == 0)].signal.values[0] == 4.0
    #     assert df[(df.peak_i == 3) & (df.channel_i == 0) & (df.cycle_i == 2)].signal.values[0] == 0.9
    #     assert np.all(df[df.peak_i == 3].aln_x.values == 4.0)
    #     assert np.all(df[df.peak_i == 3].aln_y.values == 14.0)
    #
    # def it_n_peaks():
    #     df = res.n_peaks()
    #     assert np.all(df.n_peaks.values == 2)
    #
    # def it_field__n_peaks__peaks():
    #     df = res.field__n_peaks__peaks()
    #     raise NotImplementedError
    #
    # def it_fields__n_peaks__radmat__peaks():
    #     df = res.fields__n_peaks__radmat__peaks()
    #     debug(df)
    #     raise NotImplementedError

    zest()
Example #10
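The SigprocV2Result counterpart of Example #9, using the same substitute_fn pattern on _load_field_prop with the radmat reordered via np.moveaxis.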
def zest_v2_all_df():
    props = Munch(
        radmat=npf(
            [
                [
                    [[3.0, 2.0, 1.0], [1.0, 0.0, 1.0]],
                    [[4.0, 2.0, 1.0], [1.0, 1.0, 0.0]],
                    [[5.0, 2.0, 1.0], [1.0, 1.0, 1.0]],
                    [[6.0, 3.0, 0.9], [1.0, 0.0, 1.0]],
                ],
                [
                    [[0.3, 0.2, 0.1], [0.3, 0.2, 0.1]],
                    [[0.4, 0.2, 0.1], [0.3, 0.2, 0.1]],
                    [[0.5, 0.2, 0.1], [0.3, 0.2, 0.1]],
                    [[0.6, 0.3, 2.9], [0.3, 0.2, 0.1]],
                ],
                [
                    [[1.0, 1.0, 1.0], [1.0, 1.0, 1.0]],
                    [[1.0, 1.0, 1.0], [1.0, 1.0, 1.0]],
                    [[1.0, 1.0, 1.0], [1.0, 1.0, 1.0]],
                    [[1.0, 1.0, 1.0], [1.0, 1.0, 1.0]],
                ],
                [
                    [[1.1, 1.2, 1.3], [1.4, 1.5, 1.6]],
                    [[1.1, 1.2, 1.3], [1.4, 1.5, 1.6]],
                    [[1.1, 1.2, 1.3], [1.4, 1.5, 1.6]],
                    [[1.1, 1.2, 1.3], [1.4, 1.5, 1.6]],
                ],
            ],
        ),
        peak_df=pd.DataFrame(
            [
                (0, 0, 0, 1.0, 11.0),
                (0, 0, 1, 2.0, 12.0),
                (0, 1, 0, 3.0, 13.0),
                (0, 1, 1, 4.0, 14.0),
            ],
            columns=["peak_i", "field_i", "field_peak_i", "aln_x", "aln_y"],
        ),
        field_df=pd.DataFrame(
            [
                (0, 0, 0, 1, 2, 0.1, 100.0, 2, 10, 5, 10, 500, 20, 490),
                (0, 0, 1, 2, 3, 0.2, 110.0, 2, 10, 5, 10, 500, 20, 490),
                (0, 0, 2, 3, 4, 0.3, 120.0, 2, 10, 5, 10, 500, 20, 490),
                (0, 1, 0, 4, 5, 0.4, 130.0, 2, 10, 5, 10, 500, 20, 490),
                (0, 1, 1, 5, 6, 0.5, 140.0, 2, 10, 5, 10, 500, 20, 490),
                (0, 1, 2, 6, 7, 0.6, 150.0, 2, 10, 5, 10, 500, 20, 490),
                (1, 0, 0, 7, 8, 0.7, 160.0, 2, 10, 6, 20, 510, 30, 480),
                (1, 0, 1, 8, 9, 0.8, 170.0, 2, 10, 6, 20, 510, 30, 480),
                (1, 0, 2, 9, 0, 0.9, 180.0, 2, 10, 6, 20, 510, 30, 480),
                (1, 1, 0, 0, 1, 0.0, 190.0, 2, 10, 6, 20, 510, 30, 480),
                (1, 1, 1, 1, 2, 0.1, 200.0, 2, 10, 6, 20, 510, 30, 480),
                (1, 1, 2, 2, 3, 0.2, 210.0, 2, 10, 6, 20, 510, 30, 480),
            ],
            columns=[
                "field_i",
                "channel_i",
                "cycle_i",
                "shift_y",
                "shift_x",
                "aln_score",
                "bg_median",
                "n_mask_rects",
                "mask_area",
                "border_size",
                "aligned_roi_l",
                "aligned_roi_r",
                "aligned_roi_b",
                "aligned_roi_t",
            ],
        ),
        mask_rects_df=pd.DataFrame(
            [
                (0, 0, 0, 1, 2, 3, 4),
                (0, 0, 1, 2, 2, 3, 4),
                (0, 0, 2, 3, 2, 3, 4),
                (0, 1, 0, 4, 2, 3, 4),
                (0, 1, 1, 5, 2, 3, 4),
                (0, 1, 2, 6, 2, 3, 4),
                (1, 0, 0, 7, 2, 3, 4),
                (1, 0, 1, 8, 2, 3, 4),
                (1, 0, 2, 9, 2, 3, 4),
                (1, 1, 0, 0, 2, 3, 4),
                (1, 1, 1, 1, 2, 3, 4),
                (1, 1, 2, 2, 2, 3, 4),
            ],
            columns=["field_i", "channel_i", "cycle_i", "l", "r", "w", "h",],
        ),
    )

    # radmat got refactored and we just reorder it here
    props["radmat"] = np.moveaxis(props["radmat"], 0, 3)

    def _mock_load_field_prop(inst, field_i, prop):
        return props[prop]

    zest.stack_mock(
        SigprocV2Result._load_field_prop,
        substitute_fn=_mock_load_field_prop,
        reset_before_each=False,
    )

    res = SigprocV2Result(
        is_loaded_result=True,
        field_files=[""],
        n_peaks=4,
        n_channels=2,
        n_cycles=3,
        n_fields=1,
    )

    def it_np_signal_radmat():
        assert np_array_same(res.sig(), props.radmat[:, :, :, 0])

    def it_np_noise_radmat():
        assert np_array_same(res.noi(), props.radmat[:, :, :, 1])

    # All of the following are testing the DataFrames

    def it_fields():
        assert res.fields().equals(props.field_df)

    def it_radmats():
        rad_df = res.radmats()
        check.df_t(rad_df, SigprocV2Result.radmat_df_schema)
        assert len(rad_df) == 4 * 2 * 3

        # Sanity check a few
        assert (
            rad_df[
                (rad_df.peak_i == 1) & (rad_df.channel_i == 1) & (rad_df.cycle_i == 1)
            ].signal.values[0]
            == 1.0
        )
        assert (
            rad_df[
                (rad_df.peak_i == 2) & (rad_df.channel_i == 0) & (rad_df.cycle_i == 0)
            ].signal.values[0]
            == 5.0
        )

    zest()
Example #11
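Tests for worker.ims_import from ND2 files: the nd2/tif/npy scanners and worker._nd2 are stack-mocked and fed a MockND2, covering field and cycle limits, channel mapping, and a nested movies group.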
def zest_ims_import():
    tmp_src = tempfile.NamedTemporaryFile()
    tmp_dst = tempfile.TemporaryDirectory()

    src_path = local.path(tmp_src.name)
    with local.cwd(local.path(tmp_dst.name)):

        m_scan_nd2_files = zest.stack_mock(worker._scan_nd2_files)
        m_scan_tif_files = zest.stack_mock(worker._scan_tif_files)
        m_scan_npy_files = zest.stack_mock(worker._scan_npy_files)
        m_nd2 = zest.stack_mock(worker._nd2)

        n_cycles = 2
        n_fields = 3
        n_channels = 4
        cycle_files = [(src_path / f"{i}.nd2") for i in range(n_cycles)]

        def _make_nd2(dim, fill_by="channel", n_cycles=None):
            return MockND2(
                n_fields=n_fields,
                n_channels=n_channels,
                dim=(dim, dim),
                x=[0] * n_fields,
                y=[0] * n_fields,
                z=[0] * n_fields,
                pfs_status=[0] * n_fields,
                pfs_offset=[0] * n_fields,
                exposure_time=[0] * n_fields,
                camera_temp=[0] * n_fields,
                _fill_by=fill_by,
                _n_cycles=n_cycles,
            )

        ims_import_params = None
        nd2 = None

        def _before():
            nonlocal ims_import_params, nd2
            ims_import_params = ImsImportParams()
            m_nd2.hook_to_call = lambda _: _make_nd2(64)
            m_scan_nd2_files.returns(cycle_files)
            m_scan_tif_files.returns([])
            m_scan_npy_files.returns([])

        def it_scatter_gathers():
            result = worker.ims_import(src_path, ims_import_params)
            emitted_files = list(local.path(".").walk())
            assert len(emitted_files) == 9
            assert result.params == ims_import_params
            assert result.n_fields == n_fields
            assert result.n_channels == n_channels
            assert result.n_cycles == n_cycles

        def it_converts_to_power_of_2():
            with zest.mock(worker._convert_message):
                m_nd2.hook_to_call = lambda _: _make_nd2(63)
                result = worker.ims_import(src_path, ims_import_params)
                assert result.field_chcy_ims(0).shape == (n_channels, n_cycles,
                                                          64, 64)

        def it_limits_fields():
            ims_import_params.n_fields_limit = 1
            result = worker.ims_import(src_path, ims_import_params)
            assert result.n_fields == 1

        def it_imports_src_channels():
            result = worker.ims_import(src_path, ims_import_params)
            assert np.all(result.field_chcy_ims(0)[0, :, :, :] == 0.0)
            assert np.all(result.field_chcy_ims(0)[1, :, :, :] == 1.0)

        def it_can_skip_fields():
            ims_import_params.start_field = 1
            result = worker.ims_import(src_path, ims_import_params)
            assert result.n_fields == 2

        def it_can_limit_cycles():
            ims_import_params.n_cycles_limit = 1
            result = worker.ims_import(src_path, ims_import_params)
            assert result.n_fields == n_fields
            assert result.n_cycles == n_cycles - 1
            assert result.n_channels == n_channels

        def it_can_skip_cycles():
            ims_import_params.start_cycle = 1
            result = worker.ims_import(src_path, ims_import_params)
            assert result.n_fields == n_fields
            assert result.n_cycles == n_cycles - 1
            assert result.n_channels == n_channels

        def it_respects_channel_map():
            ims_import_params.dst_ch_i_to_src_ch_i = [1, 0]
            result = worker.ims_import(src_path, ims_import_params)
            assert result.n_fields == n_fields
            assert result.n_cycles == n_cycles
            assert result.n_channels == 2
            assert np.all(result.field_chcy_ims(0)[0, :, :, :] == float(1))
            assert np.all(result.field_chcy_ims(0)[1, :, :, :] == float(0))

        def movies():
            def _before():
                nonlocal ims_import_params, nd2
                ims_import_params = ImsImportParams(is_movie=True)
                m_nd2.hook_to_call = lambda _: _make_nd2(64, "cycle", n_fields)
                m_scan_nd2_files.returns(cycle_files)
                m_scan_tif_files.returns([])

            def it_swaps_fields_cycles():
                result = worker.ims_import(src_path, ims_import_params)
                assert result.n_cycles == n_fields
                assert result.n_fields == n_cycles
                assert result.n_channels == n_channels
                for cy in range(result.n_cycles):
                    assert np.all(
                        result.field_chcy_ims(0)[:, cy, :, :] == float(cy))

            def it_can_limit_cycles():
                ims_import_params.n_cycles_limit = 2
                result = worker.ims_import(src_path, ims_import_params)
                assert result.n_cycles == 2
                assert result.n_fields == n_cycles
                assert result.n_channels == n_channels
                for cy in range(result.n_cycles):
                    assert np.all(
                        result.field_chcy_ims(0)[:, cy, :, :] == float(cy))

            def it_can_skip_cycles():
                ims_import_params.start_cycle = 1
                result = worker.ims_import(src_path, ims_import_params)
                assert result.n_cycles == n_fields - 1
                assert result.n_fields == n_cycles
                assert result.n_channels == n_channels
                for cy in range(result.n_cycles):
                    assert np.all(
                        result.field_chcy_ims(0)[:, cy, :, :] == float(cy + 1))

            def it_converts_to_power_of_2():
                with zest.mock(worker._convert_message):
                    m_nd2.hook_to_call = lambda _: _make_nd2(
                        63, "cycle", n_fields)
                    result = worker.ims_import(src_path, ims_import_params)
                    assert result.field_chcy_ims(0).shape == (
                        n_channels,
                        n_fields,
                        64,
                        64,
                    )

            def it_respects_channel_map():
                nonlocal ims_import_params, nd2
                ims_import_params = ImsImportParams(is_movie=True)
                m_nd2.hook_to_call = lambda _: _make_nd2(64)  # Channel mode
                m_scan_nd2_files.returns(cycle_files)
                m_scan_tif_files.returns([])

                ims_import_params.dst_ch_i_to_src_ch_i = [1, 0]
                result = worker.ims_import(src_path, ims_import_params)
                assert result.n_cycles == n_fields
                assert result.n_fields == n_cycles
                assert result.n_channels == 2
                assert np.all(result.field_chcy_ims(0)[0, :, :, :] == float(1))
                assert np.all(result.field_chcy_ims(0)[1, :, :, :] == float(0))

            zest()

        zest()
Example #12
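Tests for worker.ims_import from .npy files: the scanners and worker._load_npy are stack-mocked, with one nested group per npy filename convention (npy_v0 and npy_v1).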
def zest_ims_import_from_npy():
    tmp_dst = tempfile.TemporaryDirectory()
    with local.cwd(local.path(tmp_dst.name)):
        m_scan_nd2_files = zest.stack_mock(worker._scan_nd2_files)
        m_scan_tif_files = zest.stack_mock(worker._scan_tif_files)
        m_scan_npy_files = zest.stack_mock(worker._scan_npy_files)
        m_load_npy = zest.stack_mock(worker._load_npy)

        ims_import_params = None

        def _before():
            nonlocal ims_import_params
            ims_import_params = ImsImportParams()
            m_scan_nd2_files.returns([])
            m_scan_tif_files.returns([])
            m_load_npy.returns(np.zeros((16, 16)))

        def npy_v0():
            npy_files = [
                # area, field, channel, cycle
                "area_000_cell_000_555nm_001.npy",
                "area_000_cell_000_647nm_001.npy",
                "area_000_cell_000_555nm_002.npy",
                "area_000_cell_000_647nm_002.npy",
                "area_000_cell_000_555nm_003.npy",
                "area_000_cell_000_647nm_003.npy",
                "area_000_cell_001_555nm_001.npy",
                "area_000_cell_001_647nm_001.npy",
                "area_000_cell_001_555nm_002.npy",
                "area_000_cell_001_647nm_002.npy",
                "area_000_cell_001_555nm_003.npy",
                "area_000_cell_001_647nm_003.npy",
            ]

            def _before():
                m_scan_npy_files.returns(npy_files)

            def it_scans_npy_arrays():
                scan_result = worker._scan_files("")

                assert scan_result.mode == worker.ScanFileMode.npy
                assert scan_result.nd2_paths == []
                assert scan_result.tif_paths_by_field_channel_cycle == {}
                assert (local.path(
                    scan_result.npy_paths_by_field_channel_cycle[(
                        0, 0, 0)]).name == npy_files[0])
                assert (scan_result.n_fields == 2
                        and scan_result.n_channels == 2
                        and scan_result.n_cycles == 3)
                assert scan_result.dim == (16, 16)

            def it_ims_import_npy():
                res = worker.ims_import(".",
                                        ims_import_params,
                                        progress=None,
                                        pipeline=None)
                assert res.n_fields == 2 and res.n_channels == 2 and res.n_cycles == 3

            zest()

        def npy_v1():
            npy_files = [
                # field, channel, cycle
                "test01_fl0000_ch0_cy000.npy",
                "test01_fl0001_ch0_cy000.npy",
                "test01_fl0000_ch0_cy001.npy",
                "test01_fl0001_ch0_cy001.npy",
                "test01_fl0000_ch0_cy002.npy",
                "test01_fl0001_ch0_cy002.npy",
                "test01_fl0000_ch1_cy000.npy",
                "test01_fl0001_ch1_cy000.npy",
                "test01_fl0000_ch1_cy001.npy",
                "test01_fl0001_ch1_cy001.npy",
                "test01_fl0000_ch1_cy002.npy",
                "test01_fl0001_ch1_cy002.npy",
            ]

            def _before():
                m_scan_npy_files.returns(npy_files)

            def it_scans_npy_arrays():
                scan_result = worker._scan_files("")

                assert scan_result.mode == worker.ScanFileMode.npy
                assert scan_result.nd2_paths == []
                assert scan_result.tif_paths_by_field_channel_cycle == {}
                assert (local.path(
                    scan_result.npy_paths_by_field_channel_cycle[(
                        0, 0, 0)]).name == npy_files[0])

                assert (scan_result.n_fields == 2
                        and scan_result.n_channels == 2
                        and scan_result.n_cycles == 3)
                assert scan_result.dim == (16, 16)

            def it_ims_import_npy():
                res = worker.ims_import(".",
                                        ims_import_params,
                                        progress=None,
                                        pipeline=None)
                assert res.n_fields == 2 and res.n_channels == 2 and res.n_cycles == 3

            zest()

        zest()
Example #13
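A variant of Example #12 in which the result of worker._scan_files is unpacked as a plain tuple rather than accessed as a scan-result object.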
def zest_ims_import_from_npy():
    tmp_dst = tempfile.TemporaryDirectory()
    with local.cwd(local.path(tmp_dst.name)):
        m_scan_nd2_files = zest.stack_mock(worker._scan_nd2_files)
        m_scan_tif_files = zest.stack_mock(worker._scan_tif_files)
        m_scan_npy_files = zest.stack_mock(worker._scan_npy_files)
        m_load_npy = zest.stack_mock(worker._load_npy)

        npy_files = [
            # area, field, channel, cycle
            "area_000_cell_000_555nm_001.npy",
            "area_000_cell_000_647nm_001.npy",
            "area_000_cell_000_555nm_002.npy",
            "area_000_cell_000_647nm_002.npy",
            "area_000_cell_000_555nm_003.npy",
            "area_000_cell_000_647nm_003.npy",
            "area_000_cell_001_555nm_001.npy",
            "area_000_cell_001_647nm_001.npy",
            "area_000_cell_001_555nm_002.npy",
            "area_000_cell_001_647nm_002.npy",
            "area_000_cell_001_555nm_003.npy",
            "area_000_cell_001_647nm_003.npy",
        ]

        ims_import_params = None

        def _before():
            nonlocal ims_import_params
            ims_import_params = ImsImportParams()
            m_scan_nd2_files.returns([])
            m_scan_tif_files.returns([])
            m_scan_npy_files.returns(npy_files)
            m_load_npy.returns(np.zeros((16, 16)))

        def it_scans_npy_arrays():
            (
                mode,
                nd2_paths,
                tif_paths_by_field_channel_cycle,
                npy_paths_by_field_channel_cycle,
                n_fields,
                n_channels,
                n_cycles,
                dim,
            ) = worker._scan_files("")

            assert mode == "npy"
            assert nd2_paths == []
            assert tif_paths_by_field_channel_cycle == {}
            assert (local.path(
                npy_paths_by_field_channel_cycle[(0, 0,
                                                  0)]).name == npy_files[0])
            assert n_fields == 2 and n_channels == 2 and n_cycles == 3
            assert dim == (16, 16)

        def it_ims_import_npy():
            res = worker.ims_import(".",
                                    ims_import_params,
                                    progress=None,
                                    pipeline=None)
            assert res.n_fields == 2 and res.n_channels == 2 and res.n_cycles == 3

        zest()
Example #14
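Tests for helpers.protein_csv_df; helpers._protein_csv_info is stack-mocked for the whole suite, and _uniprot_lookup and _protein_csv_warning are mocked per test to cover parsing, sorting, PTM import, Uniprot lookups, and the error cases.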
def zest_protein_csv():
    zest.stack_mock(helpers._protein_csv_info)

    def it_accepts_name_and_sequence():
        csv_string = """
        Name, Seq
        P1, ABC
        P2, DEF
        """
        df = helpers.protein_csv_df(csv_string)
        assert df.loc[0, "seqstr"] == "ABC" and df.loc[0, "id"] == "P1"
        assert df.loc[1, "seqstr"] == "DEF" and df.loc[1, "id"] == "P2"

        def it_defaults_ptm_locs_to_empty_str():
            assert df.loc[0, "ptm_locs"] == ""

        def it_defaults_abundance_to_nan():
            assert np.isnan(df.loc[0, "abundance"])

        zest()

    def it_raises_if_no_seq_nor_uniprot_ac():
        csv_string = """
        Name, Abundance
        A, 10
        """
        with zest.raises(ValueError) as e:
            helpers.protein_csv_df(csv_string)
        assert "missing either a Seq or a UniprotAC" in str(e.exception)

    def it_raises_if_both_seq_and_uniprot_ac():
        csv_string = """
        Name, Seq, UniprotAC
        P1, A, P100
        """
        with zest.raises(ValueError) as e:
            helpers.protein_csv_df(csv_string)
        assert "both a Seq and a UniprotAC" in str(e.exception)

    def it_raises_if_no_name_and_no_uniprot_ac():
        csv_string = """
        Seq, Abundance
        ABC, 10
        """
        with zest.raises(ValueError) as e:
            helpers.protein_csv_df(csv_string)
        assert "missing a Name column" in str(e.exception)

    def it_reverse_sorts_by_abundance():
        csv_string = """
        Name, Seq, Abundance
        P1, ABC, 10
        P2, DEF, 100
        """
        df = helpers.protein_csv_df(csv_string)
        assert (df.loc[0, "seqstr"] == "DEF" and df.loc[0, "id"] == "P2"
                and df.loc[0, "abundance"] == 100.0)
        assert (df.loc[1, "seqstr"] == "ABC" and df.loc[1, "id"] == "P1"
                and df.loc[1, "abundance"] == 10.0)

    def it_sorts_by_name_if_no_abundance():
        csv_string = """
        Name, Seq
        P2, DEF
        P1, ABC
        """
        df = helpers.protein_csv_df(csv_string)
        assert df.loc[0, "seqstr"] == "ABC" and df.loc[0, "id"] == "P1"
        assert df.loc[1, "seqstr"] == "DEF" and df.loc[1, "id"] == "P2"

    def it_imports_ptm():
        csv_string = """
        Name, Seq, PTM
        P1, ABC, 3
        P2, DEF, 1;2
        P3, GHI, 
        """
        df = helpers.protein_csv_df(csv_string)
        assert df.loc[0, "ptm_locs"] == "3"
        assert df.loc[1, "ptm_locs"] == "1;2"
        assert df.loc[2, "ptm_locs"] == ""

    def it_lookups_uniprot():
        csv_string = """
        UniprotAC, Abundance
        P1, 10
        """
        with zest.mock(helpers._uniprot_lookup) as m:
            m.returns([{"id:": "foo", "seqstr": "ABC"}])
            df = helpers.protein_csv_df(csv_string)
        assert df.loc[0, "seqstr"] == "ABC"

        def it_uses_uniprot_ac_as_name():
            assert df.loc[0, "id"] == "P1"

        def it_imports_abundance():
            assert df.loc[0, "abundance"] == 10.0

        zest()

    def it_nans_missing_abundances():
        csv_string = """
        UniprotAC
        P1
        """
        with zest.mock(helpers._uniprot_lookup) as m:
            m.returns([{"id:": "foo", "seqstr": "ABC"}])
            df = helpers.protein_csv_df(csv_string)
        assert (df.loc[0, "id"] == "P1" and df.loc[0, "seqstr"] == "ABC"
                and np.isnan(df.loc[0, "abundance"]))

    def it_warns_on_no_seq_from_uniprot():
        csv_string = """
        UniprotAC
        P1
        """
        with zest.mock(helpers._protein_csv_warning) as m_warn:
            with zest.mock(helpers._uniprot_lookup) as m_lookup:
                m_lookup.returns([])
                helpers.protein_csv_df(csv_string)
        assert m_warn.called_once()

    def it_warns_on_more_than_one_seq_from_uniprot():
        csv_string = """
        UniprotAC
        P1
        """
        with zest.mock(helpers._protein_csv_warning) as m_warn:
            with zest.mock(helpers._uniprot_lookup) as m_lookup:
                m_lookup.returns([{
                    "id": "foo",
                    "seqstr": "123"
                }, {
                    "id": "bar",
                    "seqstr": "123456"
                }])
                df = helpers.protein_csv_df(csv_string)
                assert len(df) == 1 and df.loc[0, "seqstr"] == "123456"
        assert m_warn.called_once()

    def it_raises_on_duplicate_names():
        csv_string = """
        Name, Seq
        P1, ABC
        P1, DEF
        """
        with zest.raises(ValueError) as e:
            helpers.protein_csv_df(csv_string)
        assert "duplicate names" in str(e.exception)

    def it_raises_on_duplicate_seqs():
        csv_string = """
        Name, Seq
        P1, ABC
        P2, ABC
        """
        with zest.raises(ValueError) as e:
            helpers.protein_csv_df(csv_string)
        assert "duplicate seqs" in str(e.exception)

    zest()
Example #15
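Tests for the check module (check.t, list_t, list_or_tuple_t, array_t, affirm); check.log.error is stack-mocked for the whole suite.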
def zest_check():
    zest.stack_mock(check.log.error)

    def it_converts_none_to_type_none_scalar():
        a = None
        check.t(a, None)

    def it_converts_none_to_type_none_tuple():
        a = None
        check.t(a, (None,))

    def it_gets_var_name_and_source():
        with zest.raises(check.CheckError) as e:
            test_func()
        assert e.exception.var_name == "some_float"
        assert "zest_check.py" in e.exception.source

    def it_checks_type_tuples():
        some_float = 1.0
        some_int = 1
        check.t(some_float, (float, int))
        check.t(some_int, (float, int))

    def it_checks_lists():
        l = [1, 2, 3]
        check.list_t(l, int)

        l = [1, 2, 3, 4.0]
        with zest.raises(check.CheckError):
            check.list_t(l, int)

        t = (1, 2, 3)
        with zest.raises(check.CheckError):
            check.list_t(t, int)

    def it_checks_lists_or_tuples():
        l = [1, 2, 3]
        check.list_or_tuple_t(l, int)

        t = (1, 2, 3)
        check.list_or_tuple_t(t, int)

        l = [1, 2, 3.0]
        with zest.raises(check.CheckError):
            check.list_or_tuple_t(l, int)

        t = (1, 2, 3.0)
        with zest.raises(check.CheckError):
            check.list_or_tuple_t(t, int)

    def zest_arrays():
        arr = np.array([1, 2, 3])

        def it_checks_is_array():
            with zest.raises(check.CheckError):
                check.array_t([])

        def it_checks_is_dtype_if_specified():
            check.array_t(arr, dtype=np.int64)

            with zest.raises(check.CheckError):
                check.array_t(arr, dtype=np.float64)

        def it_prints_array_shape_if_not_specified():
            with zest.mock(check._print) as p:
                check.array_t(arr)
            assert "(3,)" in p.normalized_call()["msg"]

        zest()

    def zest_test():
        def it_passes():
            check.affirm(True)

        def it_raises_checkerror_by_default():
            with zest.raises(check.CheckAffirmError):
                check.affirm(False)

        def it_accepts_exception_type():
            with zest.raises(ValueError):
                check.affirm(False, exp=ValueError)

        def it_accepts_exception_instance():
            with zest.raises(ValueError) as e:
                check.affirm(False, exp=ValueError())

        def it_pushes_msg():
            with zest.raises(check.CheckAffirmError) as e:
                check.affirm(False, "abc")
            assert e.exception.message == "abc"

        zest()

    zest()
Example #16
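A schema validation suite: s._print_error and s._print_help are stack-mocked so their calls can be asserted, covering the type validators, bounds, dicts, defaults, requirements, and help output.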
def zest_schema():
    m_print_error = zest.stack_mock(s._print_error)
    m_print_help = zest.stack_mock(s._print_help)

    def _it_validates_noneable(schema_func):
        test_s = s(schema_func())
        with zest.raises(SchemaValidationFailed):
            test_s.validate(None)

        test_s = s(schema_func(noneable=True))
        test_s.validate(None)

    def it_raises_on_a_bad_schema():
        with zest.raises(SchemaInvalid):
            s(1)

    def it_prints_by_default():
        with zest.raises(SchemaValidationFailed):
            test_s = s(s.is_int())
            test_s.validate("not int")
        assert m_print_error.called()

    def it_raises_by_default():
        with zest.raises(SchemaValidationFailed):
            test_s = s(s.is_int())
            test_s.validate("not int")

    def check_bounds():
        def it_validates_min_val():
            s._check_bounds(5, bounds=(5, None))
            with zest.raises(SchemaValidationFailed):
                s._check_bounds(3, bounds=(5, None))

        def it_validates_max_val():
            s._check_bounds(5, bounds=(None, 5))
            with zest.raises(SchemaValidationFailed):
                s._check_bounds(6, bounds=(None, 5))

        def it_validates_bounds():
            s._check_bounds(4, bounds=(4, 6))
            s._check_bounds(6, bounds=(4, 6))
            with zest.raises(SchemaValidationFailed):
                s._check_bounds(3, bounds=(4, 6))
                s._check_bounds(7, bounds=(4, 6))

        def it_raises_if_bounds_not_valid():
            with zest.raises(SchemaInvalid):
                s._check_bounds_arg(bounds=4)
            with zest.raises(SchemaInvalid):
                s._check_bounds_arg(bounds=("a", "b"))
            with zest.raises(SchemaInvalid):
                s._check_bounds_arg(bounds=())

        zest()

    def types():
        def is_int():
            def it_validates_int():
                test_s = s(s.is_int())
                test_s.validate(1)
                with zest.raises(SchemaValidationFailed):
                    test_s.validate("a str")
                with zest.raises(SchemaValidationFailed):
                    test_s.validate(1.0)

            def it_validates_noneable():
                _it_validates_noneable(s.is_int)

            zest()

        def is_float():
            def it_validates_float():
                test_s = s(s.is_float())
                test_s.validate(1.0)
                with zest.raises(SchemaValidationFailed):
                    test_s.validate("a str")
                with zest.raises(SchemaValidationFailed):
                    test_s.validate(1)

            def it_validates_noneable():
                _it_validates_noneable(s.is_float)

            zest()

        def is_number():
            def it_validates_number():
                test_s = s(s.is_number())
                test_s.validate(1.0)
                test_s.validate(1)
                with zest.raises(SchemaValidationFailed):
                    test_s.validate("a str")

            def it_validates_noneable():
                _it_validates_noneable(s.is_number)

            zest()

        def is_str():
            def it_validates_str():
                test_s = s(s.is_str())
                test_s.validate("test")
                test_s.validate("")
                with zest.raises(SchemaValidationFailed):
                    test_s.validate(1)

            def it_validates_noneable():
                _it_validates_noneable(s.is_str)

            def it_validates_allow_empty_string():
                test_s = s(s.is_str(allow_empty_string=False))
                test_s.validate("test")
                with zest.raises(SchemaValidationFailed):
                    test_s.validate("")

                test_s = s(s.is_str())
                test_s.validate("test")
                test_s.validate("")

            zest()

        def is_bool():
            def it_validates_bool():
                test_s = s(s.is_bool())
                test_s.validate(True)
                with zest.raises(SchemaValidationFailed):
                    test_s.validate(1)

            def it_validates_noneable():
                _it_validates_noneable(s.is_bool)

            zest()

        def is_deprecated():
            def it_validates_any_usage():
                test_s = s(s.is_deprecated())
                with zest.raises(SchemaValidationFailed):
                    test_s.validate(1)

            def it_never_requires_deprecated():
                test_s = s(s.is_kws_r(a=s.is_deprecated()))
                test_s.validate(dict())

            zest()

        def is_list():
            def it_validates_default_list():
                test_s = s(s.is_list())
                test_s.validate([])
                test_s.validate([1, 2, 3, "str", dict(), []])
                with zest.raises(SchemaValidationFailed):
                    test_s.validate(1)

            def it_has_elems_as_first_arg():
                test_s = s(s.is_list(s.is_int()))
                test_s.validate([1])
                with zest.raises(SchemaValidationFailed):
                    test_s.validate(["str"])

            def it_validates_noneable():
                _it_validates_noneable(s.is_list)

            def it_validates_default_list_elems_int():
                test_s = s(s.is_list(elems=s.is_int()))
                test_s.validate([1, 2, 3])
                with zest.raises(SchemaValidationFailed):
                    test_s.validate(1)
                with zest.raises(SchemaValidationFailed):
                    test_s.validate([1, "str"])

            def it_checks_bound_type():
                with zest.raises(SchemaInvalid):
                    s(s.is_list(min_len="str"))
                with zest.raises(SchemaInvalid):
                    s(s.is_list(max_len="str"))

            def it_bounds_min():
                test_s = s(s.is_list(min_len=2))
                test_s.validate([1, 2])
                with zest.raises(SchemaValidationFailed):
                    test_s.validate([1])
                with zest.raises(SchemaValidationFailed):
                    test_s.validate([])

            def it_bounds_max():
                test_s = s(s.is_list(max_len=2))
                test_s.validate([])
                test_s.validate([1, 2])
                with zest.raises(SchemaValidationFailed):
                    test_s.validate([1, 2, 3])

            zest()

        def is_dict():
            def it_validates_default_dict():
                test_s = s(s.is_dict())
                test_s.validate({})
                test_s.validate(dict(a=1, b=2))
                with zest.raises(SchemaValidationFailed):
                    test_s.validate(1)

            def it_validates_noneable():
                _it_validates_noneable(s.is_dict)

            def it_ignores_underscored_keys():
                test_s = s(
                    s.is_dict(elems=dict(a=s.is_int(), b=s.is_int()),
                              no_extras=True))
                with zest.raises(SchemaValidationFailed):
                    test_s.validate(dict(a=1, b="str"))
                with zest.raises(SchemaValidationFailed):
                    test_s.validate(dict(a=1, b=2, _c=[]))

                test_s = s(
                    s.is_dict(
                        elems=dict(a=s.is_int(), b=s.is_int()),
                        no_extras=True,
                        ignore_underscored_keys=True,
                    ))
                test_s.validate(dict(a=1, b=2, _c=[]))

            def it_all_required_false_by_default():
                test_s = s(s.is_dict(elems=dict(a=s.is_int(), b=s.is_int())))
                test_s.validate(dict(a=1))

            def it_checks_all_required():
                test_s = s(
                    s.is_dict(elems=dict(a=s.is_int(), b=s.is_int()),
                              all_required=True))
                with zest.raises(SchemaValidationFailed):
                    test_s.validate(dict(a=1))

            def it_allows_all_required_to_be_overridden():
                test_s = s(
                    s.is_dict(elems=dict(a=s.is_int(required=False)),
                              all_required=True))
                test_s.validate(dict())

            def it_checks_no_extra_false_by_default():
                test_s = s(s.is_dict(elems=dict(a=s.is_int(), b=s.is_int())))
                test_s.validate(dict(a=1, c=1))

            def it_checks_no_extra():
                test_s = s(
                    s.is_dict(elems=dict(a=s.is_int(), b=s.is_int()),
                              no_extras=True))
                with zest.raises(SchemaValidationFailed):
                    test_s.validate(dict(a=1, b=1, c=1))

            def it_checks_key_type_str():
                test_s = s(s.is_dict(elems={1: s.is_int()}))
                with zest.raises(SchemaValidationFailed):
                    test_s.validate({1: 2})

            def it_checks_required():
                test_s = s(
                    s.is_dict(
                        elems=dict(a=s.is_int(required=True), b=s.is_int())))
                with zest.raises(SchemaValidationFailed):
                    test_s.validate(dict(b=1))

            zest()

        zest()

    class TestType:
        pass

    def is_type():
        def it_validates_type():
            test_s = s(s.is_type(TestType))
            test_s.validate(TestType())
            with zest.raises(SchemaValidationFailed):
                test_s.validate("a str")
            with zest.raises(SchemaValidationFailed):
                test_s.validate(1.0)

        def it_validates_noneable():
            test_s = s(s.is_type(TestType))
            with zest.raises(SchemaValidationFailed):
                test_s.validate(None)
            test_s = s(s.is_type(TestType, noneable=True))
            test_s.validate(None)

        zest()

    def it_validates_recursively():
        test_s = s(
            s.is_dict(elems=dict(
                a=s.is_int(),
                b=s.is_list(required=True, elems=s.is_str()),
                c=s.is_dict(required=True),
            )))
        test_s.validate(dict(a=1, b=["a", "b"], c=dict()))
        with zest.raises(SchemaValidationFailed):
            test_s.validate(dict(a=1, b=[1], c=dict()))
        with zest.raises(SchemaValidationFailed):
            test_s.validate(dict(a=1, b=["a"], c=1))

    def defaults():
        test_s = None

        def _before():
            nonlocal test_s
            test_s = s(
                s.is_dict(
                    all_required=True,
                    elems=dict(
                        a=s.is_int(),
                        b=s.is_int(),
                        c=s.is_dict(all_required=True,
                                    elems=dict(d=s.is_int(), e=s.is_int())),
                    ),
                ))

        def it_applies_defaults_recursively():
            test = dict(a=1, c=dict(e=10))

            with zest.raises(SchemaValidationFailed):
                test_s.validate(test)

            test_s.apply_defaults(dict(a=2, b=3, c=dict(d=4)), apply_to=test)
            assert test["a"] == 1
            assert test["b"] == 3
            assert test["c"]["d"] == 4
            assert test["c"]["e"] == 10
            test_s.validate(test)

        def it_does_not_apply_defaults_to_none_by_default():
            test = dict(a=None, b=1, c=None)
            test_s.apply_defaults(dict(a=1, b=3, c=dict(d=4, e=5)),
                                  apply_to=test)
            assert test["a"] is None
            assert test["c"] is None

        def it_applies_defaults_on_none():
            test = dict(a=None, b=1, c=None)
            test_s.apply_defaults(dict(a=1, b=3, c=dict(d=4, e=5)),
                                  apply_to=test,
                                  override_nones=True)
            assert test["a"] == 1
            assert test["b"] == 1
            assert test["c"]["d"] == 4
            assert test["c"]["e"] == 5

        def it_allows_elems_in_dict():
            s(s.is_dict(dict(a=s.is_int(noneable=True))))

        def it_raises_on_a_missing_default():
            test = dict(a=1, c=dict())
            test_s.apply_defaults(defaults=dict(a=2, b=3), apply_to=test)
            with zest.raises(SchemaValidationFailed):
                test_s.validate(test)

        def it_applies_a_none_to_a_missing_key():
            test = dict()
            test_s.apply_defaults(defaults=dict(a=None), apply_to=test)
            assert test["a"] is None

        def it_applies_a_none_to_a_missing_dict():
            test = dict()
            test_s.apply_defaults(defaults=dict(c=None), apply_to=test)
            assert test["c"] is None

        def it_does_not_overwrite_an_existing_dict():
            test_s = s(s.is_kws_r(a=s.is_dict()))
            test = dict(a=dict(b=1))
            # a has a good value, do not overwrite it
            test_s.apply_defaults(defaults=dict(a=None), apply_to=test)
            assert test["a"]["b"] == 1

        zest()

    def requirements():
        def it_returns_required_elems():
            userdata = dict(some_key=1)

            test_s = s(
                s.is_dict(
                    all_required=True,
                    elems=dict(
                        a=s.is_int(),
                        b=s.is_float(help="A float"),
                        c=s.is_number(),
                        d=s.is_str(userdata=userdata),
                        e=s.is_list(),
                        f=s.is_dict(all_required=True,
                                    elems=dict(d=s.is_int(), e=s.is_int())),
                    ),
                ))
            reqs = test_s.requirements()
            assert reqs == [
                ("a", int, None, None),
                ("b", float, "A float", None),
                ("c", float, None, None),
                ("d", str, None, userdata),
                ("e", list, None, None),
                ("f", dict, None, None),
            ]

        def it_returns_none_on_a_non_dict_schema():
            test_s = s(s.is_str())
            reqs = test_s.requirements()
            assert reqs == []

        zest()

    def it_shows_help():
        schema = s(
            s.is_kws(a=s.is_dict(
                help="Help for a",
                elems=dict(
                    b=s.is_int(help="Help for b"),
                    c=s.is_kws(d=s.is_int(help="Help for d")),
                ),
            )))
        schema.help()

        help_calls = m_print_help.normalized_calls()
        help_calls = [{h["key"]: h["help"]} for h in help_calls]
        assert help_calls == [
            {
                "root": None
            },
            {
                "a": "Help for a"
            },
            {
                "b": "Help for b"
            },
            {
                "c": None
            },
            {
                "d": "Help for d"
            },
        ]

    def top_level_fields():
        def it_fetches_list_elem_type():
            schema = s(s.is_dict(elems=dict(a=s.is_list(s.is_int()))))
            tlf = schema.top_level_fields()
            assert tlf[0][0] == "a" and tlf[0][4] is int

        def it_fetches_user_data():
            schema = s(
                s.is_dict(
                    help="Help for a",
                    elems=dict(
                        b=s.is_int(help="Help for b", userdata="userdata_1"),
                        c=s.is_kws(d=s.is_int(help="Help for d")),
                    ),
                ))
            tlf = schema.top_level_fields()
            assert tlf[0][0] == "b" and tlf[0][3] == "userdata_1"

        zest()

    zest()