Code Example #1
 def __init__(self, *args, **kwargs):
     super(self.__class__, self).__init__(*args, **kwargs)
     self.MH = MH()
     self.err = Exception("Some Exception")
     self.req_id = "TEST-Req-ID"
     self.url = mock.MagicMock()
     self.request = mock.MagicMock()
     self.client = mock.MagicMock()
     self.service_name = mock.MagicMock()
     self.callback_url = mock.MagicMock()
     self.body = mock.MagicMock()
     self.desc = mock.MagicMock()
     self.response = mock.MagicMock()
     self.remote_addr = mock.MagicMock()
     self.json_body = mock.MagicMock()
     self.F = formatter
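
A minimal, standalone sketch (not from the project above) of why plain MagicMock objects are enough for attributes like request and response: attribute access, item access, and method calls are all recorded and can be asserted afterwards.

from unittest import mock

request = mock.MagicMock()
request.headers["X-Request-Id"]                        # item access works out of the box
request.get_json(silent=True)                          # so do arbitrary method calls
request.get_json.assert_called_once_with(silent=True)
assert request.remote_addr is request.remote_addr      # the same child mock is returned each time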
Code Example #2
File: helpers.py Project: Ya-dola/MangaReaderScraper
def setup_uploader(uploader):
    upl = uploader()
    manga = mock.MagicMock()
    manga.name = mock.Mock(return_value="hiya")
    upl._setup_adapter(manga)
    return upl
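
One detail worth noting in the helper above: name is a reserved keyword argument of the Mock constructor (it labels the mock's repr), which is presumably why the example assigns manga.name after construction instead of passing it in. A minimal, standalone illustration:

from unittest import mock

manga = mock.MagicMock()
manga.name = "hiya"                 # assigning after construction sets a real attribute
assert manga.name == "hiya"

labelled = mock.MagicMock(name="manga")
print(labelled)                     # name= in the constructor only labels the mock's repr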
Code Example #3
        class UnitTestSqliteHook(SqliteHook):
            conn_name_attr = 'test_conn_id'
            log = mock.MagicMock()

            def get_conn(self):
                return conn
Code Example #4
    def test_kd_resampling(self, resampler, create_filename, zarr_open,
                           xr_dset, cnc):
        """Test the kd resampler."""
        import numpy as np
        import dask.array as da
        from satpy.resample import KDTreeResampler
        data, source_area, swath_data, source_swath, target_area = get_test_data()
        mock_dset = mock.MagicMock()
        xr_dset.return_value = mock_dset
        resampler = KDTreeResampler(source_swath, target_area)
        resampler.precompute(
            mask=da.arange(5, chunks=5).astype(bool), cache_dir='.')
        resampler.resampler.get_neighbour_info.assert_called()
        # swath definitions should not be cached
        self.assertEqual(len(mock_dset.to_zarr.mock_calls), 0)
        resampler.resampler.reset_mock()
        cnc.assert_called_once()

        resampler = KDTreeResampler(source_area, target_area)
        resampler.precompute()
        resampler.resampler.get_neighbour_info.assert_called_with(mask=None)

        try:
            the_dir = tempfile.mkdtemp()
            resampler = KDTreeResampler(source_area, target_area)
            create_filename.return_value = os.path.join(the_dir, 'test_cache.zarr')
            zarr_open.side_effect = ValueError()
            resampler.precompute(cache_dir=the_dir)
            # assert data was saved to the on-disk cache
            self.assertEqual(len(mock_dset.to_zarr.mock_calls), 1)
            # assert that zarr_open was called to try to load something from disk
            self.assertEqual(len(zarr_open.mock_calls), 1)
            # we should have cached things in-memory
            self.assertEqual(len(resampler._index_caches), 1)
            nbcalls = len(resampler.resampler.get_neighbour_info.mock_calls)
            # test reusing the resampler
            zarr_open.side_effect = None

            class FakeZarr(dict):

                def close(self):
                    pass

                def astype(self, dtype):
                    pass

            zarr_open.return_value = FakeZarr(valid_input_index=1,
                                              valid_output_index=2,
                                              index_array=3,
                                              distance_array=4)
            resampler.precompute(cache_dir=the_dir)
            # we already have things cached in-memory, no need to save again
            self.assertEqual(len(mock_dset.to_zarr.mock_calls), 1)
            # we already have things cached in-memory, don't need to load
            self.assertEqual(len(zarr_open.mock_calls), 1)
            # we should have cached things in-memory
            self.assertEqual(len(resampler._index_caches), 1)
            self.assertEqual(len(resampler.resampler.get_neighbour_info.mock_calls), nbcalls)

            # test loading saved resampler
            resampler = KDTreeResampler(source_area, target_area)
            resampler.precompute(cache_dir=the_dir)
            self.assertEqual(len(zarr_open.mock_calls), 4)
            self.assertEqual(len(resampler.resampler.get_neighbour_info.mock_calls), nbcalls)
            # we should have cached things in-memory now
            self.assertEqual(len(resampler._index_caches), 1)
        finally:
            shutil.rmtree(the_dir)

        fill_value = 8
        resampler.compute(data, fill_value=fill_value)
        resampler.resampler.get_sample_from_neighbour_info.assert_called_with(data, fill_value)
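
The caching assertions above rely on two MagicMock features: side_effect (used here to make the first zarr_open behave like a cache miss) and mock_calls (to count how often a mock was used). A minimal, project-independent sketch of the same pattern:

from unittest import mock

zarr_open = mock.MagicMock()
zarr_open.side_effect = ValueError("no cache on disk yet")   # first attempt: simulated miss
try:
    zarr_open("test_cache.zarr")
except ValueError:
    pass

zarr_open.side_effect = None                                 # later attempts succeed
zarr_open.return_value = {"index_array": 3}
assert zarr_open("test_cache.zarr") == {"index_array": 3}
assert len(zarr_open.mock_calls) == 2                        # both attempts were recorded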
Code Example #5
File: helpers.py Project: Ya-dola/MangaReaderScraper
 def files_search(self, *args, **kwargs):
     searcher = mock.MagicMock()
     searcher.matches = self.match_found
     return searcher
Code Example #6
                for ch in channels} == set(entity_info[DEV_SIG_CHANNELS])
        assert ent_cls.__name__ == entity_info[DEV_SIG_ENT_MAP_CLASS]


def _ch_mock(cluster):
    """Return mock of a channel with a cluster."""
    channel = mock.MagicMock()
    type(channel).cluster = mock.PropertyMock(
        return_value=cluster(mock.MagicMock()))
    return channel


@mock.patch(
    "homeassistant.components.zha.core.discovery.ProbeEndpoint"
    ".handle_on_off_output_cluster_exception",
    new=mock.MagicMock(),
)
@mock.patch(
    "homeassistant.components.zha.core.discovery.ProbeEndpoint.probe_single_cluster"
)
def _test_single_input_cluster_device_class(probe_mock):
    """Test SINGLE_INPUT_CLUSTER_DEVICE_CLASS matching by cluster id or class."""

    door_ch = _ch_mock(zigpy.zcl.clusters.closures.DoorLock)
    cover_ch = _ch_mock(zigpy.zcl.clusters.closures.WindowCovering)
    multistate_ch = _ch_mock(zigpy.zcl.clusters.general.MultistateInput)

    class QuirkedIAS(zigpy.quirks.CustomCluster,
                     zigpy.zcl.clusters.security.IasZone):
        pass
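
The _ch_mock helper above attaches a mock.PropertyMock to type(channel) so that reading channel.cluster goes through the property machinery. A minimal, standalone version of that pattern:

from unittest import mock

channel = mock.MagicMock()
type(channel).cluster = mock.PropertyMock(return_value="fake-cluster")

assert channel.cluster == "fake-cluster"    # a plain attribute read hits the PropertyMock
type(channel).cluster.assert_called()       # and each read is recorded as a call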
Code Example #7
async def test_devices(
    device,
    hass_disable_services,
    zigpy_device_mock,
    zha_device_joined_restored,
):
    """Test device discovery."""
    entity_registry = homeassistant.helpers.entity_registry.async_get(
        hass_disable_services)

    zigpy_device = zigpy_device_mock(
        device[SIG_ENDPOINTS],
        "00:11:22:33:44:55:66:77",
        device[SIG_MANUFACTURER],
        device[SIG_MODEL],
        node_descriptor=device[SIG_NODE_DESC],
        patch_cluster=False,
    )

    cluster_identify = _get_first_identify_cluster(zigpy_device)
    if cluster_identify:
        cluster_identify.request.reset_mock()

    orig_new_entity = zha_channels.ChannelPool.async_new_entity
    _dispatch = mock.MagicMock(wraps=orig_new_entity)
    try:
        zha_channels.ChannelPool.async_new_entity = lambda *a, **kw: _dispatch(
            *a, **kw)
        zha_dev = await zha_device_joined_restored(zigpy_device)
        await hass_disable_services.async_block_till_done()
    finally:
        zha_channels.ChannelPool.async_new_entity = orig_new_entity

    entity_ids = hass_disable_services.states.async_entity_ids()
    await hass_disable_services.async_block_till_done()
    zha_entity_ids = {
        ent
        for ent in entity_ids if ent.split(".")[0] in zha_const.PLATFORMS
    }

    if cluster_identify:
        called = int(zha_device_joined_restored.name == "zha_device_joined")
        assert cluster_identify.request.call_count == called
        assert cluster_identify.request.await_count == called
        if called:
            assert cluster_identify.request.call_args == mock.call(
                False,
                64,
                (zigpy.types.uint8_t, zigpy.types.uint8_t),
                2,
                0,
                expect_reply=True,
                manufacturer=None,
                tries=1,
                tsn=None,
            )

    event_channels = {
        ch.id
        for pool in zha_dev.channels.pools
        for ch in pool.client_channels.values()
    }

    entity_map = device[DEV_SIG_ENT_MAP]
    assert zha_entity_ids == {
        e[DEV_SIG_ENT_MAP_ID]
        for e in entity_map.values() if not e.get("default_match", False)
    }
    assert event_channels == set(device[DEV_SIG_EVT_CHANNELS])

    for call in _dispatch.call_args_list:
        _, component, entity_cls, unique_id, channels = call[0]
        key = (component, unique_id)
        entity_id = entity_registry.async_get_entity_id(
            component, "zha", unique_id)

        assert key in entity_map
        assert entity_id is not None
        no_tail_id = NO_TAIL_ID.sub("", entity_map[key][DEV_SIG_ENT_MAP_ID])
        assert entity_id.startswith(no_tail_id)
        assert {ch.name
                for ch in channels} == set(entity_map[key][DEV_SIG_CHANNELS])
        assert entity_cls.__name__ == entity_map[key][DEV_SIG_ENT_MAP_CLASS]
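
The _dispatch = mock.MagicMock(wraps=orig_new_entity) trick above keeps the real async_new_entity running while recording every call, so call_args_list can be inspected afterwards. A minimal sketch of wrapping a real callable (the dispatch function below is made up for illustration):

from unittest import mock

def dispatch(component, unique_id):                     # stand-in for the callable being spied on
    return f"{component}.{unique_id}"

spy = mock.MagicMock(wraps=dispatch)
assert spy("sensor", "abc123") == "sensor.abc123"       # the wrapped function still does the work
spy.assert_called_once_with("sensor", "abc123")         # ...and the call was captured
for call in spy.call_args_list:
    component, unique_id = call[0]                      # positional args, as unpacked in the test above
    print(component, unique_id)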
Code Example #8
File: test_security.py Project: lgov/airflow
 def test_has_access(self, mock_has_view_access):
     user = mock.MagicMock()
     user.is_anonymous = False
     mock_has_view_access.return_value = True
     self.assertTrue(self.security_manager.has_access('perm', 'view', user))
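
The mock_has_view_access argument is presumably injected by a mock.patch decorator that the snippet does not show; only its return_value needs forcing. The same shape in isolation, with placeholder names:

from unittest import mock

def has_view_access(user, perm, view):
    raise RuntimeError("would talk to the real security backend")

with mock.patch(f"{__name__}.has_view_access") as mock_has_view_access:
    mock_has_view_access.return_value = True
    assert has_view_access("user", "perm", "view") is True   # the patched stand-in answers instead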
Code Example #9
File: mock_process.py Project: folly3/airflow-1
 def __init__(self, *args, **kwargs):
     output = kwargs.get('output', ['' for _ in range(10)])
     self.readline = mock.MagicMock(
         side_effect=[line.encode() for line in output])
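
Passing a list as side_effect makes the mock return the elements one per call, which is how this fake process hands out one line per readline() call. A minimal sketch:

from unittest import mock

readline = mock.MagicMock(side_effect=[b"first line\n", b"second line\n", b""])
assert readline() == b"first line\n"
assert readline() == b"second line\n"
assert readline() == b""          # a fourth call would raise StopIteration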
Code Example #10
def _create_list_response(messages, token):
    page = [
        mock.MagicMock(payload={"message": message}) for message in messages
    ]
    return mock.MagicMock(pages=(n for n in [page]), next_page_token=token)
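
Keyword arguments passed to MagicMock(...) become attributes of the resulting mock, which is how the fake list response gets pages and next_page_token in a single call. For instance (the token value below is a placeholder):

from unittest import mock

page = [mock.MagicMock(payload={"message": "hello"})]
response = mock.MagicMock(pages=iter([page]), next_page_token="token-123")

assert next(response.pages) is page
assert response.next_page_token == "token-123"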
Code Example #11
File: test_security.py Project: lgov/airflow
 def test_get_user_roles(self):
     user = mock.MagicMock()
     user.is_anonymous = False
     roles = self.appbuilder.sm.find_role('Admin')
     user.roles = roles
     self.assertEqual(self.security_manager.get_user_roles(user), roles)
Code Example #12
 def setUp(self):
     super(TestLegacyUpdateCinderStore, self).setUp()
     self.vol_id = uuid.uuid4()
     self.cinder_store_mock = FakeObject(
         client=mock.MagicMock(), volumes=FakeObject(
             get=lambda v_id: FakeObject(volume_type='fast'),
             detach=mock.MagicMock(),
             create=lambda size_gb, name, metadata, volume_type:
             FakeObject(
                 id=self.vol_id, manager=FakeObject(
                     get=lambda vol_id: FakeObject(
                         manager=FakeObject(
                             get=lambda vol_id: FakeObject(
                                 status='in-use',
                                 begin_detaching=mock.MagicMock(),
                                 terminate_connection=mock.MagicMock())),
                         id=vol_id,
                         status='available',
                         size=1,
                         reserve=mock.MagicMock(),
                         initialize_connection=mock.MagicMock(),
                         encrypted=False,
                         unreserve=mock.MagicMock(),
                         delete=mock.MagicMock(),
                         attach=mock.MagicMock(),
                         update_all_metadata=mock.MagicMock(),
                         update_readonly_flag=mock.MagicMock())))))
Code Example #13
File: test_model_tasks.py Project: zongzw/octavia
    def setUp(self):

        self.listener_mock = mock.MagicMock()
        self.listener_mock.name = 'TEST'

        super(TestObjectUpdateTasks, self).setUp()
Code Example #14
 def setUp(self):
     self.__helper = \
         engine_api_visualizations_helper.EngineAPIVisualizationsHelper(
             server_address='https://test-server',
             auth_cookie=mock.MagicMock())
Code Example #15
def create_epics_device(prefix=PREFIX, rb_pv=RB_PV, sp_pv=SP_PV, enabled=True):
    mock_cs = mock.MagicMock()
    mock_cs.get_single.return_value = 40.0
    device = EpicsDevice(prefix, mock_cs, enabled=enabled, rb_pv=rb_pv, sp_pv=sp_pv)
    return device
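
Setting return_value on a nested attribute, as mock_cs.get_single.return_value = 40.0 does above, works because every attribute access on a MagicMock lazily creates a child mock. A standalone illustration (the PV name below is made up):

from unittest import mock

mock_cs = mock.MagicMock()
mock_cs.get_single.return_value = 40.0                  # child mock created on first access

assert mock_cs.get_single("SR01:MAG:RB") == 40.0        # hypothetical PV name
mock_cs.get_single.assert_called_once_with("SR01:MAG:RB")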
Code Example #16
File: test_dataprep.py Project: ysktir/airflow-1
class TestGoogleDataprepHook(unittest.TestCase):
    def setUp(self):
        with mock.patch("airflow.hooks.base.BaseHook.get_connection") as conn:
            conn.return_value.extra_dejson = EXTRA
            self.hook = dataprep.GoogleDataprepHook(
                dataprep_conn_id="dataprep_default")

    @patch("airflow.providers.google.cloud.hooks.dataprep.requests.get")
    def test_get_jobs_for_job_group_should_be_called_once_with_params(
            self, mock_get_request):
        self.hook.get_jobs_for_job_group(JOB_ID)
        mock_get_request.assert_called_once_with(
            f"{URL}/{JOB_ID}/jobs",
            headers={
                "Content-Type": "application/json",
                "Authorization": f"Bearer {TOKEN}"
            },
        )

    @patch(
        "airflow.providers.google.cloud.hooks.dataprep.requests.get",
        side_effect=[HTTPError(), mock.MagicMock()],
    )
    def test_get_jobs_for_job_group_should_pass_after_retry(
            self, mock_get_request):
        self.hook.get_jobs_for_job_group(JOB_ID)
        assert mock_get_request.call_count == 2

    @patch(
        "airflow.providers.google.cloud.hooks.dataprep.requests.get",
        side_effect=[mock.MagicMock(), HTTPError()],
    )
    def test_get_jobs_for_job_group_should_not_retry_after_success(
            self, mock_get_request):
        # pylint: disable=no-member
        self.hook.get_jobs_for_job_group.retry.sleep = mock.Mock()
        self.hook.get_jobs_for_job_group(JOB_ID)
        assert mock_get_request.call_count == 1

    @patch(
        "airflow.providers.google.cloud.hooks.dataprep.requests.get",
        side_effect=[
            HTTPError(),
            HTTPError(),
            HTTPError(),
            HTTPError(),
            mock.MagicMock()
        ],
    )
    def test_get_jobs_for_job_group_should_retry_after_four_errors(
            self, mock_get_request):
        # pylint: disable=no-member
        self.hook.get_jobs_for_job_group.retry.sleep = mock.Mock()
        self.hook.get_jobs_for_job_group(JOB_ID)
        assert mock_get_request.call_count == 5

    @patch(
        "airflow.providers.google.cloud.hooks.dataprep.requests.get",
        side_effect=[
            HTTPError(),
            HTTPError(),
            HTTPError(),
            HTTPError(),
            HTTPError()
        ],
    )
    def test_get_jobs_for_job_group_raise_error_after_five_calls(
            self, mock_get_request):
        with pytest.raises(RetryError) as ctx:
            # pylint: disable=no-member
            self.hook.get_jobs_for_job_group.retry.sleep = mock.Mock()
            self.hook.get_jobs_for_job_group(JOB_ID)
        assert "HTTPError" in str(ctx.value)
        assert mock_get_request.call_count == 5

    @patch("airflow.providers.google.cloud.hooks.dataprep.requests.get")
    def test_get_job_group_should_be_called_once_with_params(
            self, mock_get_request):
        self.hook.get_job_group(JOB_ID, EMBED, INCLUDE_DELETED)
        mock_get_request.assert_called_once_with(
            f"{URL}/{JOB_ID}",
            headers={
                "Content-Type": "application/json",
                "Authorization": f"Bearer {TOKEN}",
            },
            params={
                "embed": "",
                "includeDeleted": False
            },
        )

    @patch(
        "airflow.providers.google.cloud.hooks.dataprep.requests.get",
        side_effect=[HTTPError(), mock.MagicMock()],
    )
    def test_get_job_group_should_pass_after_retry(self, mock_get_request):
        self.hook.get_job_group(JOB_ID, EMBED, INCLUDE_DELETED)
        assert mock_get_request.call_count == 2

    @patch(
        "airflow.providers.google.cloud.hooks.dataprep.requests.get",
        side_effect=[mock.MagicMock(), HTTPError()],
    )
    def test_get_job_group_should_not_retry_after_success(
            self, mock_get_request):
        self.hook.get_job_group.retry.sleep = mock.Mock()  # pylint: disable=no-member
        self.hook.get_job_group(JOB_ID, EMBED, INCLUDE_DELETED)
        assert mock_get_request.call_count == 1

    @patch(
        "airflow.providers.google.cloud.hooks.dataprep.requests.get",
        side_effect=[
            HTTPError(),
            HTTPError(),
            HTTPError(),
            HTTPError(),
            mock.MagicMock(),
        ],
    )
    def test_get_job_group_should_retry_after_four_errors(
            self, mock_get_request):
        self.hook.get_job_group.retry.sleep = mock.Mock()  # pylint: disable=no-member
        self.hook.get_job_group(JOB_ID, EMBED, INCLUDE_DELETED)
        assert mock_get_request.call_count == 5

    @patch(
        "airflow.providers.google.cloud.hooks.dataprep.requests.get",
        side_effect=[
            HTTPError(),
            HTTPError(),
            HTTPError(),
            HTTPError(),
            HTTPError()
        ],
    )
    def test_get_job_group_raise_error_after_five_calls(
            self, mock_get_request):
        with pytest.raises(RetryError) as ctx:
            # pylint: disable=no-member
            self.hook.get_job_group.retry.sleep = mock.Mock()
            self.hook.get_job_group(JOB_ID, EMBED, INCLUDE_DELETED)
        assert "HTTPError" in str(ctx.value)
        assert mock_get_request.call_count == 5

    @patch("airflow.providers.google.cloud.hooks.dataprep.requests.post")
    def test_run_job_group_should_be_called_once_with_params(
            self, mock_get_request):
        data = '"{\\"wrangledDataset\\": {\\"id\\": 1234567}}"'
        self.hook.run_job_group(body_request=DATA)
        mock_get_request.assert_called_once_with(
            f"{URL}",
            headers={
                "Content-Type": "application/json",
                "Authorization": f"Bearer {TOKEN}",
            },
            data=data,
        )

    @patch(
        "airflow.providers.google.cloud.hooks.dataprep.requests.post",
        side_effect=[HTTPError(), mock.MagicMock()],
    )
    def test_run_job_group_should_pass_after_retry(self, mock_get_request):
        self.hook.run_job_group(body_request=DATA)
        assert mock_get_request.call_count == 2

    @patch(
        "airflow.providers.google.cloud.hooks.dataprep.requests.post",
        side_effect=[mock.MagicMock(), HTTPError()],
    )
    def test_run_job_group_should_not_retry_after_success(
            self, mock_get_request):
        self.hook.run_job_group.retry.sleep = mock.Mock()  # pylint: disable=no-member
        self.hook.run_job_group(body_request=DATA)
        assert mock_get_request.call_count == 1

    @patch(
        "airflow.providers.google.cloud.hooks.dataprep.requests.post",
        side_effect=[
            HTTPError(),
            HTTPError(),
            HTTPError(),
            HTTPError(),
            mock.MagicMock(),
        ],
    )
    def test_run_job_group_should_retry_after_four_errors(
            self, mock_get_request):
        self.hook.run_job_group.retry.sleep = mock.Mock()  # pylint: disable=no-member
        self.hook.run_job_group(body_request=DATA)
        assert mock_get_request.call_count == 5

    @patch(
        "airflow.providers.google.cloud.hooks.dataprep.requests.post",
        side_effect=[
            HTTPError(),
            HTTPError(),
            HTTPError(),
            HTTPError(),
            HTTPError()
        ],
    )
    def test_run_job_group_raise_error_after_five_calls(
            self, mock_get_request):
        with pytest.raises(RetryError) as ctx:
            # pylint: disable=no-member
            self.hook.run_job_group.retry.sleep = mock.Mock()
            self.hook.run_job_group(body_request=DATA)
        assert "HTTPError" in str(ctx.value)
        assert mock_get_request.call_count == 5
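
Every retry test in this class uses the same recipe: side_effect gets a list in which HTTPError() entries are raised in order and a final mock.MagicMock() is returned as the successful response, and call_count then proves how many attempts were made. A minimal, framework-independent sketch of that recipe (the loop below is a stand-in, not the retry decorator the hook itself uses):

from unittest import mock
from requests.exceptions import HTTPError

get = mock.MagicMock(side_effect=[HTTPError(), HTTPError(), mock.MagicMock(status_code=200)])

def fetch_with_retry(attempts=5):
    last_error = None
    for _ in range(attempts):
        try:
            return get("https://example.invalid/jobGroups")   # placeholder URL
        except HTTPError as err:
            last_error = err
    raise last_error

response = fetch_with_retry()
assert response.status_code == 200
assert get.call_count == 3        # two failures, then success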
Code Example #17
 def setUp(self):
     self.regular_user = User.objects.create(username="******")
     self.admin = User.objects.create(username="******")
     self.super_admin = User.objects.create(username="******")
     self.request = mock.MagicMock()
     self.request.user.is_authenticated = mock.MagicMock()
Code Example #18
 def _create_connection(self):
     return mock.MagicMock()
Code Example #19
def _ch_mock(cluster):
    """Return mock of a channel with a cluster."""
    channel = mock.MagicMock()
    type(channel).cluster = mock.PropertyMock(
        return_value=cluster(mock.MagicMock()))
    return channel
Code Example #20
 def test_multiple_opsdroids(self):
     with OpsDroid() as opsdroid:
         opsdroid.__class__.critical = mock.MagicMock()
         with OpsDroid() as opsdroid2, self.assertRaises(SystemExit):
             opsdroid2.exit()
         self.assertEqual(len(opsdroid.__class__.critical.mock_calls), 1)
Code Example #21
File: test_init.py Project: rikroe/core
def mock_bus(hass):
    """Mock the event bus listener."""
    hass.bus.listen = mock.MagicMock()
Code Example #22
def test_downloadqueue_update():
    mydownloadqueue = DownloadQueue()
    mydownloadqueue.add(episode1)
    mydownloadqueue.start = mock.MagicMock(name="start")
    mydownloadqueue.update()
    assert mydownloadqueue.start.call_count == 1
Code Example #23
    def test_bil_resampling(self, resampler, create_filename, zarr_open,
                            xr_dset, move_existing_caches):
        """Test the bilinear resampler."""
        import numpy as np
        import dask.array as da
        import xarray as xr
        from satpy.resample import BilinearResampler
        data, source_area, swath_data, source_swath, target_area = get_test_data()

        mock_dset = mock.MagicMock()
        xr_dset.return_value = mock_dset

        # Test that bilinear resampling info calculation is called,
        # and the info is saved
        zarr_open.side_effect = IOError()
        resampler = BilinearResampler(source_swath, target_area)
        resampler.precompute(
            mask=da.arange(5, chunks=5).astype(bool))
        resampler.resampler.get_bil_info.assert_called()
        resampler.resampler.get_bil_info.assert_called_with()
        self.assertEqual(len(mock_dset.to_zarr.mock_calls), 0)
        resampler.resampler.reset_mock()
        zarr_open.reset_mock()
        zarr_open.side_effect = None

        # Test that get_sample_from_bil_info is called properly
        fill_value = 8
        resampler.resampler.get_sample_from_bil_info.return_value = \
            xr.DataArray(da.zeros(target_area.shape), dims=('y', 'x'))
        new_data = resampler.compute(data, fill_value=fill_value)
        resampler.resampler.get_sample_from_bil_info.assert_called_with(
            data, fill_value=fill_value, output_shape=target_area.shape)
        self.assertIn('y', new_data.coords)
        self.assertIn('x', new_data.coords)
        if CRS is not None:
            self.assertIn('crs', new_data.coords)
            self.assertIsInstance(new_data.coords['crs'].item(), CRS)
            self.assertIn('lcc', new_data.coords['crs'].item().to_proj4())
            self.assertEqual(new_data.coords['y'].attrs['units'], 'meter')
            self.assertEqual(new_data.coords['x'].attrs['units'], 'meter')
            if hasattr(target_area, 'crs'):
                self.assertIs(target_area.crs, new_data.coords['crs'].item())

        # Test that the resampling info is tried to read from the disk
        resampler = BilinearResampler(source_swath, target_area)
        resampler.precompute(cache_dir='.')
        zarr_open.assert_called()

        # Test caching the resampling info
        try:
            the_dir = tempfile.mkdtemp()
            resampler = BilinearResampler(source_area, target_area)
            create_filename.return_value = os.path.join(the_dir, 'test_cache.zarr')
            zarr_open.reset_mock()
            zarr_open.side_effect = IOError()

            resampler.precompute(cache_dir=the_dir)
            xr_dset.assert_called()
            # assert data was saved to the on-disk cache
            self.assertEqual(len(mock_dset.to_zarr.mock_calls), 1)
            # assert that zarr.open was called to try to load
            # something from disk
            self.assertEqual(len(zarr_open.mock_calls), 1)

            nbcalls = len(resampler.resampler.get_bil_info.mock_calls)
            # test reusing the resampler
            zarr_open.side_effect = None

            class FakeZarr(dict):

                def close(self):
                    pass

                def astype(self, dtype):
                    return self

                def compute(self):
                    return self

            zarr_open.return_value = FakeZarr(bilinear_s=1,
                                              bilinear_t=2,
                                              slices_x=3,
                                              slices_y=4,
                                              mask_slices=5,
                                              out_coords_x=6,
                                              out_coords_y=7)
            resampler.precompute(cache_dir=the_dir)
            # we already have things cached in-memory, no need to save again
            self.assertEqual(len(mock_dset.to_zarr.mock_calls), 1)
            # we already have things cached in-memory, don't need to load
            # self.assertEqual(len(zarr_open.mock_calls), 1)
            self.assertEqual(len(resampler.resampler.get_bil_info.mock_calls), nbcalls)

            # test loading saved resampler
            resampler = BilinearResampler(source_area, target_area)
            resampler.precompute(cache_dir=the_dir)
            self.assertEqual(len(zarr_open.mock_calls), 2)
            self.assertEqual(len(resampler.resampler.get_bil_info.mock_calls), nbcalls)
            # we should have cached things in-memory now
            # self.assertEqual(len(resampler._index_caches), 1)

            resampler = BilinearResampler(source_area, target_area)
            resampler.precompute(cache_dir=the_dir)
            resampler.save_bil_info(cache_dir=the_dir)
            zarr_file = os.path.join(the_dir, 'test_cache.zarr')
            # Save again faking the cache file already exists
            with mock.patch('os.path.exists') as exists:
                exists.return_value = True
                resampler.save_bil_info(cache_dir=the_dir)
            move_existing_caches.assert_called_once_with(the_dir, zarr_file)

        finally:
            shutil.rmtree(the_dir)
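
Near the end of the test, mock.patch('os.path.exists') is used as a context manager so the fake only applies inside the with block and the real function comes back afterwards. A minimal sketch of that form:

import os
from unittest import mock

with mock.patch("os.path.exists") as exists:
    exists.return_value = True
    assert os.path.exists("/definitely/not/there")     # the patched stand-in answers

assert not os.path.exists("/definitely/not/there")     # the real implementation is restored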
Code Example #24
 def __init__(self, *args, **kwargs):
     self.model = mock.MagicMock()
     self.model.name = mock.sentinel.model
     super().__init__(*args, **kwargs)
Code Example #25
File: helpers.py Project: Ya-dola/MangaReaderScraper
 def files_upload(self, *args, **kwargs):
     response = mock.MagicMock()
     response.path_lower = self.file
     response.text = "success"
     return response
Code Example #26
 def test_custom_model_is_not_overriden(self):
     c = CustomModelResource(request=mock.MagicMock())
     self.assertEqual(c.model.name, mock.sentinel.model)
Code Example #27
 def test_message(name, message, expected_return):
     # type: (str, str, Optional[str]) -> None
     mock_client = mock.MagicMock()
     mock_client.full_name = name
     mock_message = {'content': message}
     self.assertEqual(expected_return, extract_query_without_mention(mock_message, mock_client))
Code Example #28
 def test_set_temp_fail(self):
     """Test if setting the temperature fails."""
     self.device.setpoint_heat = mock.MagicMock(
         side_effect=somecomfort.SomeComfortError)
     self.honeywell.set_temperature(123)
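
Here side_effect is given an exception class rather than an instance, so the mock raises a fresh SomeComfortError whenever it is called. The same pattern with a built-in exception:

from unittest import mock

setter = mock.MagicMock(side_effect=ValueError)    # a class works as well as an instance
try:
    setter(123)
except ValueError:
    print("the call raised, as configured")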
Code Example #29
    def test_cur_fork_head_not_poet_block(self, mock_utils,
                                          mock_validator_registry_view,
                                          mock_consensus_state,
                                          mock_poet_enclave_factory,
                                          mock_consensus_state_store):
        """ Test verifies that if the current fork head is not a valid block,
            and if new_fork_head.previous_block_id == cur_fork_head.identifier
            then the new fork head switches consensus. Otherwise, raises the
            appropriate exception - trying to compare a PoET block to a
            non-PoET block that is not the direct predecessor
        """

        # create a mock_validator_registry_view
        mock_validator_registry_view.return_value.get_validator_info. \
            return_value = \
            ValidatorInfo(
                name='validator_001',
                id='validator_deadbeef',
                signup_info=SignUpInfo(
                    poet_public_key='00112233445566778899aabbccddeeff'))

        # create a mock_wait_certificate that does nothing in check_valid
        mock_wait_certificate = mock.Mock()
        mock_wait_certificate.check_valid.return_value = None

        # set mock_utils.deserialize_wait_certificate
        # to return a specific value for each fork_head that is used in
        # poet_fork_resolver.compare()
        # with cur_fork_head being deserialized first
        mock_utils.deserialize_wait_certificate.side_effect = \
            [None,
             mock_wait_certificate,
             None,
             mock_wait_certificate]

        mock_block_cache = mock.MagicMock()
        mock_state_view_factory = mock.Mock()

        # create mock_cur_fork_head
        mock_cur_fork_header = \
            mock.Mock(
                identifier='0123456789abcdefedcba9876543210',
                signer_pubkey='90834587139405781349807435098745',
                previous_block_id='2',
                header_signature='00112233445566778899aabbccddeeff')

        # create mock_new_fork_head
        mock_new_fork_header = \
            mock.Mock(
                identifier='0123456789abcdefedcba9876543211',
                signer_pubkey='90834587139405781349807435098745',
                previous_block_id='2',
                header_signature='00112233445566778899aabbccddeeff')

        fork_resolver = \
            poet_fork_resolver.PoetForkResolver(
                block_cache=mock_block_cache,
                state_view_factory=mock_state_view_factory,
                data_dir=self._temp_dir,
                config_dir=self._temp_dir,
                validator_id='validator_deadbeef')

        # Subtest 1: check that the test fails when the current
        # fork head is not a valid PoET block
        with self.assertRaises(TypeError) as cm:
            fork_resolver.compare_forks(cur_fork_head=mock_cur_fork_header,
                                        new_fork_head=mock_new_fork_header)
        self.assertEqual(
            'Trying to compare a PoET block to a non-PoET '
            'block that is not the direct predecessor', str(cm.exception))

        # Subtest 2: check that if new_fork_head.previous_block_id
        # == cur_fork_head.identifier
        # then the new fork head switches consensus

        # modify mock_cur_fork_header.identifier
        mock_cur_fork_header.identifier = \
            mock_new_fork_header.previous_block_id

        # check test
        with mock.patch('sawtooth_poet.poet_consensus.poet_fork_resolver.'
                        'LOGGER') as mock_logger:
            self.assertTrue(
                fork_resolver.compare_forks(
                    cur_fork_head=mock_cur_fork_header,
                    new_fork_head=mock_new_fork_header))

            # Could be a hack, but verify that the appropriate log message is
            # generated - so we at least have some faith that the failure was
            # because of what we are testing and not something else.  I know
            # that this is fragile if the log message is changed, so would
            # accept any suggestions on a better way to verify that the
            # function fails for the reason we expect.

            (message, *_), _ = mock_logger.info.call_args
            self.assertTrue(
                'New fork head switches consensus to PoET' in message)
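
The final check unpacks mock_logger.info.call_args, which behaves like an (args, kwargs) pair, to get at the log format string. A standalone illustration of that unpacking:

from unittest import mock

logger = mock.MagicMock()
logger.info("New fork head switches consensus to %s", "PoET")

(message, *rest), kwargs = logger.info.call_args
assert "switches consensus" in message
assert rest == ["PoET"]
assert kwargs == {}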
Code Example #30
File: test_api_publish.py Project: jamofer/cpm
    def test_publish_api(self):
        publish_service = mock.MagicMock()

        result = publish_project(publish_service)

        assert result.status_code == 0