def test_AxonMapModel(engine):
    set_params = {'xystep': 2, 'engine': engine, 'rho': 432, 'axlambda': 2,
                  'n_axons': 9, 'n_ax_segments': 50,
                  'xrange': (-30, 30), 'yrange': (-20, 20),
                  'loc_od_x': 5, 'loc_od_y': 6}
    model = AxonMapModel()
    for param in set_params:
        npt.assert_equal(hasattr(model.spatial, param), True)
    # User can override default values:
    for key, value in set_params.items():
        setattr(model.spatial, key, value)
        npt.assert_equal(getattr(model.spatial, key), value)
    model = AxonMapModel(**set_params)
    model.build(**set_params)
    for key, value in set_params.items():
        npt.assert_equal(getattr(model.spatial, key), value)
    # Zeros in, zeros out:
    implant = ArgusII(stim=np.zeros(60))
    npt.assert_almost_equal(model.predict_percept(implant).data, 0)
    implant.stim = np.zeros(60)
    npt.assert_almost_equal(model.predict_percept(implant).data, 0)
    # Implant and model must be built for the same eye:
    with pytest.raises(ValueError):
        implant = ArgusII(eye='LE', stim=np.zeros(60))
        model.predict_percept(implant)
    with pytest.raises(ValueError):
        AxonMapModel(eye='invalid').build()
    with pytest.raises(ValueError):
        AxonMapModel(xystep=5).build(eye='invalid')
def test_AxonMapModel_predict_percept(engine):
    model = AxonMapModel(xystep=0.55, axlambda=100, rho=100, thresh_percept=0,
                         engine=engine, xrange=(-20, 20), yrange=(-15, 15),
                         n_axons=500)
    model.build()
    # Single-electrode stim:
    img_stim = np.zeros(60)
    img_stim[47] = 1
    percept = model.predict_percept(ArgusII(stim=img_stim))
    # Single bright pixel, rest of arc is less bright:
    npt.assert_equal(np.sum(percept.data > 0.8), 1)
    npt.assert_equal(np.sum(percept.data > 0.6), 2)
    npt.assert_equal(np.sum(percept.data > 0.1), 7)
    npt.assert_equal(np.sum(percept.data > 0.0001), 32)
    # Overall only a few bright pixels:
    npt.assert_almost_equal(np.sum(percept.data), 3.31321, decimal=3)
    # Brightest pixel is in lower right:
    npt.assert_almost_equal(percept.data[33, 46, 0], np.max(percept.data))
    # Top half is empty:
    npt.assert_almost_equal(np.sum(percept.data[:27, :, 0]), 0)
    # Same for lower band:
    npt.assert_almost_equal(np.sum(percept.data[39:, :, 0]), 0)
    # Full Argus II with small lambda: 60 bright spots
    model = AxonMapModel(engine='serial', xystep=1, rho=100, axlambda=40,
                         xrange=(-20, 20), yrange=(-15, 15), n_axons=500)
    model.build()
    percept = model.predict_percept(ArgusII(stim=np.ones(60)))
    # Most spots are pretty bright, but there are 2 dimmer ones (due to their
    # location on the retina):
    npt.assert_equal(np.sum(percept.data > 0.5), 28)
    npt.assert_equal(np.sum(percept.data > 0.275), 56)
    # Model gives same outcome as Spatial:
    spatial = AxonMapSpatial(engine='serial', xystep=1, rho=100, axlambda=40,
                             xrange=(-20, 20), yrange=(-15, 15), n_axons=500)
    spatial.build()
    spatial_percept = spatial.predict_percept(ArgusII(stim=np.ones(60)))
    npt.assert_almost_equal(percept.data, spatial_percept.data)
    npt.assert_equal(percept.time, None)
def test_AxonMapModel_grow_axon_bundles(engine):
    for n_axons in [1, 2, 3, 5, 10]:
        model = AxonMapModel(xystep=2, engine=engine, n_axons=n_axons,
                             axons_range=(-20, 20), xrange=(-20, 20),
                             yrange=(-15, 15))
        bundles = model.spatial.grow_axon_bundles()
        npt.assert_equal(len(bundles), n_axons)
def test_AxonMapModel_find_closest_axon(engine):
    model = AxonMapModel(xystep=1, engine=engine, n_axons=5,
                         xrange=(-20, 20), yrange=(-15, 15),
                         axons_range=(-45, 45))
    model.build()
    # Pretend there is an axon close to each point on the grid:
    bundles = [np.array([x + 0.001, y - 0.001],
                        dtype=np.float32).reshape((1, 2))
               for x, y in zip(model.spatial.grid.xret.ravel(),
                               model.spatial.grid.yret.ravel())]
    closest = model.spatial.find_closest_axon(bundles)
    for ax1, ax2 in zip(bundles, closest):
        npt.assert_almost_equal(ax1[0, 0], ax2[0, 0])
        npt.assert_almost_equal(ax1[0, 1], ax2[0, 1])
    # Looking up just one point does not return a list of axons:
    axon = bundles[0]
    closest = model.spatial.find_closest_axon(bundles, xret=axon[0, 0],
                                              yret=axon[0, 1])
    npt.assert_almost_equal(closest, axon)
    # Return the index as well:
    closest, closest_idx = model.spatial.find_closest_axon(bundles,
                                                           xret=axon[0, 0],
                                                           yret=axon[0, 1],
                                                           return_index=True)
    npt.assert_almost_equal(closest, axon)
    npt.assert_equal(closest_idx, 0)
def plot_on_axon_map(self, annotate_implant=False, annotate_quadrants=True,
                     ax=None):
    if ax is None:
        _, ax = plt.subplots(figsize=(10, 10))
    AxonMapModel().plot(annotate=annotate_quadrants, ax=ax)
    self.earray.plot(annotate=annotate_implant, ax=ax)
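# Usage sketch for the method above (hedged): it reads like a method on a
# prosthesis-system class (it uses self.earray), so the call below assumes an
# ArgusII instance exposes it; flags and figure handling are illustrative.
import matplotlib.pyplot as plt
from pulse2percept.implants import ArgusII

implant = ArgusII()
implant.plot_on_axon_map(annotate_implant=True, annotate_quadrants=True)
plt.show()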
def test_AxonMapModel__jansonius2009(eye, loc_od, sign, engine):
    # With `rho` starting at 0, all axons should originate in the optic disc
    # center:
    model = AxonMapModel(loc_od=loc_od, xystep=2, engine=engine,
                         ax_segments_range=(0, 45), n_ax_segments=100)
    for phi0 in [-135.0, 66.0, 128.0]:
        ax_pos = model.spatial._jansonius2009(phi0)
        npt.assert_almost_equal(ax_pos[0, 0], loc_od[0])
        npt.assert_almost_equal(ax_pos[0, 1], loc_od[1])
    # These axons should all end at the meridian:
    for phi0 in [110.0, 135.0, 160.0]:
        model = AxonMapModel(loc_od=(15, 2), xystep=2, engine=engine,
                             n_ax_segments=801, ax_segments_range=(0, 45))
        ax_pos = model.spatial._jansonius2009(sign * phi0)
        npt.assert_almost_equal(ax_pos[-1, 1], 0.0, decimal=1)
    # `phi0` must be within [-180, 180]:
    for phi0 in [-200.0, 181.0]:
        with pytest.raises(ValueError):
            failed = AxonMapModel(xystep=2, engine=engine)
            failed.spatial._jansonius2009(phi0)
    # `n_rho` must be >= 1:
    for n_rho in [-1, 0]:
        with pytest.raises(ValueError):
            model = AxonMapModel(n_ax_segments=n_rho, xystep=2, engine=engine)
            model.spatial._jansonius2009(0.0)
    # `ax_segments_range` must have min <= max:
    for lorho in [-200.0, 90.0]:
        with pytest.raises(ValueError):
            model = AxonMapModel(ax_segments_range=(lorho, 45), xystep=2,
                                 engine=engine)
            model.spatial._jansonius2009(0)
    for hirho in [-200.0, 40.0]:
        with pytest.raises(ValueError):
            model = AxonMapModel(ax_segments_range=(45, hirho), xystep=2,
                                 engine=engine)
            model.spatial._jansonius2009(0)
    # A single axon fiber with `phi0`=0 should return a single pixel location
    # that corresponds to the optic disc:
    model = AxonMapModel(loc_od=loc_od, xystep=2, engine=engine, eye=eye,
                         ax_segments_range=(0, 0), n_ax_segments=1)
    single_fiber = model.spatial._jansonius2009(0)
    npt.assert_equal(len(single_fiber), 1)
    npt.assert_almost_equal(single_fiber[0], loc_od)
def test_plot_argus_simulated_phosphenes(implant):
    implant.stim = {'A1': [1, 0, 0], 'B2': [0, 1, 0], 'C3': [0, 0, 1]}
    percepts = ScoreboardModel().build().predict_percept(implant)
    plot_argus_simulated_phosphenes(percepts, implant)
    # Add axon map:
    _, ax = plt.subplots()
    plot_argus_simulated_phosphenes(percepts, implant, ax=ax,
                                    axon_map=AxonMapModel())
def test_AxonMapModel_calc_bundle_tangent(engine):
    model = AxonMapModel(xystep=5, engine=engine, n_axons=500,
                         xrange=(-20, 20), yrange=(-15, 15),
                         n_ax_segments=500, axons_range=(-180, 180),
                         ax_segments_range=(3, 50))
    npt.assert_almost_equal(model.spatial.calc_bundle_tangent(0, 0), 0.4819,
                            decimal=3)
    npt.assert_almost_equal(model.spatial.calc_bundle_tangent(0, 1000),
                            -0.5532, decimal=3)
    with pytest.raises(TypeError):
        model.spatial.calc_bundle_tangent([0], 1000)
    with pytest.raises(TypeError):
        model.spatial.calc_bundle_tangent(0, [1000])
def test_plot_argus_phosphenes():
    df = pd.DataFrame([
        {'subject': 'S1', 'electrode': 'A1', 'image': np.random.rand(10, 10),
         'xrange': (-10, 10), 'yrange': (-10, 10)},
        {'subject': 'S1', 'electrode': 'B2', 'image': np.random.rand(10, 10),
         'xrange': (-10, 10), 'yrange': (-10, 10)},
    ])
    _, ax = plt.subplots()
    plot_argus_phosphenes(df, ArgusI(), ax=ax)
    plot_argus_phosphenes(df, ArgusII(), ax=ax)
    # Add axon map:
    _, ax = plt.subplots()
    plot_argus_phosphenes(df, ArgusI(), ax=ax, axon_map=AxonMapModel())
    # Data must be a DataFrame:
    with pytest.raises(TypeError):
        plot_argus_phosphenes(np.ones(10), ArgusI())
    # DataFrame must have the required columns:
    with pytest.raises(ValueError):
        plot_argus_phosphenes(pd.DataFrame(), ArgusI())
    # Subjects must all be the same:
    with pytest.raises(ValueError):
        dff = pd.DataFrame([{'subject': 'S1'}, {'subject': 'S2'}])
        plot_argus_phosphenes(dff, ArgusI())
    # Works only for Argus:
    with pytest.raises(TypeError):
        plot_argus_phosphenes(df, AlphaAMS())
    # Works only for axon maps:
    with pytest.raises(TypeError):
        plot_argus_phosphenes(df, ArgusI(), ax=ax, axon_map=ScoreboardModel())
    # Manual subject selection:
    plot_argus_phosphenes(df[df.electrode == 'B2'], ArgusI(), ax=ax)
    # If no implant is given, the DataFrame must have additional columns:
    with pytest.raises(ValueError):
        plot_argus_phosphenes(df, ax=ax)
    df['implant_type_str'] = 'ArgusII'
    df['implant_x'] = 0
    df['implant_y'] = 0
    df['implant_rot'] = 0
    plot_argus_phosphenes(df, ax=ax)
def test_AxonMapModel_find_closest_axon(engine):
    model = AxonMapModel(xystep=1, engine=engine, n_axons=5,
                         xrange=(-20, 20), yrange=(-15, 15),
                         axons_range=(-45, 45))
    model.build()
    # Pretend there is an axon close to each point on the grid:
    bundles = [np.array([x + 0.001, y - 0.001],
                        dtype=np.float32).reshape((1, 2))
               for x, y in zip(model.spatial.grid.xret.ravel(),
                               model.spatial.grid.yret.ravel())]
    closest = model.spatial.find_closest_axon(bundles)
    for ax1, ax2 in zip(bundles, closest):
        npt.assert_almost_equal(ax1[0, 0], ax2[0, 0])
        npt.assert_almost_equal(ax1[0, 1], ax2[0, 1])
def test_AxonMapModel_calc_axon_contribution(engine):
    model = AxonMapModel(xystep=2, engine=engine, n_axons=10,
                         xrange=(-20, 20), yrange=(-15, 15),
                         axons_range=(-30, 30))
    model.build()
    xyret = np.column_stack((model.spatial.grid.xret.ravel(),
                             model.spatial.grid.yret.ravel()))
    bundles = model.spatial.grow_axon_bundles()
    axons = model.spatial.find_closest_axon(bundles)
    contrib = model.spatial.calc_axon_contribution(axons)
    # Check lambda math:
    for ax, xy in zip(contrib, xyret):
        axon = np.insert(ax, 0, list(xy) + [0], axis=0)
        d2 = np.cumsum(np.diff(axon[:, 0], axis=0) ** 2 +
                       np.diff(axon[:, 1], axis=0) ** 2)
        sensitivity = np.exp(-d2 / (2.0 * model.spatial.axlambda ** 2))
        npt.assert_almost_equal(sensitivity, ax[:, 2])
def test_AxonMapModel(engine):
    set_params = {'xystep': 2, 'engine': engine, 'rho': 432, 'axlambda': 20,
                  'n_axons': 9, 'n_ax_segments': 50,
                  'xrange': (-30, 30), 'yrange': (-20, 20),
                  'loc_od': (5, 6)}
    model = AxonMapModel()
    for param in set_params:
        npt.assert_equal(hasattr(model.spatial, param), True)
    # User can override default values:
    for key, value in set_params.items():
        setattr(model.spatial, key, value)
        npt.assert_equal(getattr(model.spatial, key), value)
    model = AxonMapModel(**set_params)
    model.build(**set_params)
    for key, value in set_params.items():
        npt.assert_equal(getattr(model.spatial, key), value)
    # Converting ret <=> dva:
    npt.assert_equal(isinstance(model.retinotopy, Watson2014Map), True)
    npt.assert_almost_equal(model.retinotopy.ret2dva(0, 0), (0, 0))
    npt.assert_almost_equal(model.retinotopy.dva2ret(0, 0), (0, 0))
    model2 = AxonMapModel(retinotopy=Watson2014DisplaceMap())
    npt.assert_equal(isinstance(model2.retinotopy, Watson2014DisplaceMap),
                     True)
    # Zeros in, zeros out:
    implant = ArgusII(stim=np.zeros(60))
    npt.assert_almost_equal(model.predict_percept(implant).data, 0)
    implant.stim = np.zeros(60)
    npt.assert_almost_equal(model.predict_percept(implant).data, 0)
    # Implant and model must be built for the same eye:
    with pytest.raises(ValueError):
        implant = ArgusII(eye='LE', stim=np.zeros(60))
        model.predict_percept(implant)
    with pytest.raises(ValueError):
        AxonMapModel(eye='invalid').build()
    with pytest.raises(ValueError):
        AxonMapModel(xystep=5).build(eye='invalid')
    # Lambda cannot be too small:
    with pytest.raises(ValueError):
        AxonMapModel(axlambda=9).build()
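# A minimal sketch of the ret <=> dva conversion exercised above: the model's
# retinotopy object maps between degrees of visual angle (dva) and retinal
# coordinates in microns. The coordinate values here are illustrative, and the
# round trip is only approximately the identity.
from pulse2percept.models import AxonMapModel

model = AxonMapModel()
x_ret, y_ret = model.retinotopy.dva2ret(5, 0)          # dva -> retinal microns
x_dva, y_dva = model.retinotopy.ret2dva(x_ret, y_ret)  # and back to dva
print(x_dva, y_dva)  # roughly (5, 0)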
def test_deepcopy_AxonMapModel():
    original = AxonMapModel()
    copied = copy.deepcopy(original)
    # Assert these are two different objects:
    npt.assert_equal(id(original) != id(copied), True)
    # Assert the objects are equivalent:
    npt.assert_equal(original.__dict__ == copied.__dict__, True)
    # Assert that __eq__ works:
    npt.assert_equal(original == copied, True)
    # Assert the spatial models are equivalent but do not share the same
    # AxonMapSpatial object:
    npt.assert_equal(original.spatial == copied.spatial, True)
    npt.assert_equal(id(original.spatial) != id(copied.spatial), True)
    # Assert that changing the copy doesn't change the original:
    copied.spatial.xrange = (-10, 10)
    npt.assert_equal(original.spatial != copied.spatial, True)
def test_plot_argus_phosphenes():
    df = pd.DataFrame([
        {'subject': 'S1', 'electrode': 'A1', 'image': np.random.rand(10, 10),
         'img_x_dva': (-10, 10), 'img_y_dva': (-10, 10)},
        {'subject': 'S1', 'electrode': 'B2', 'image': np.random.rand(10, 10),
         'img_x_dva': (-10, 10), 'img_y_dva': (-10, 10)},
    ])
    _, ax = plt.subplots()
    plot_argus_phosphenes(df, ArgusI(), ax=ax)
    plot_argus_phosphenes(df, ArgusII(), ax=ax)
    # Add axon map:
    _, ax = plt.subplots()
    plot_argus_phosphenes(df, ArgusI(), ax=ax, axon_map=AxonMapModel())
    # Data must be a DataFrame:
    with pytest.raises(TypeError):
        plot_argus_phosphenes(np.ones(10), ArgusI())
    # DataFrame must have the required columns:
    with pytest.raises(ValueError):
        plot_argus_phosphenes(pd.DataFrame(), ArgusI())
    # Subjects must all be the same:
    with pytest.raises(ValueError):
        dff = pd.DataFrame([{'subject': 'S1'}, {'subject': 'S2'}])
        plot_argus_phosphenes(dff, ArgusI())
    # Works only for Argus:
    with pytest.raises(TypeError):
        plot_argus_phosphenes(df, AlphaAMS())
def test_AxonMapModel_calc_axon_sensitivity(engine):
    model = AxonMapModel(xystep=2, engine=engine, n_axons=10,
                         xrange=(-20, 20), yrange=(-15, 15),
                         axons_range=(-30, 30))
    model.build()
    xyret = np.column_stack((model.spatial.grid.xret.ravel(),
                             model.spatial.grid.yret.ravel()))
    bundles = model.spatial.grow_axon_bundles()
    axons = model.spatial.find_closest_axon(bundles)
    # Need two separate contribs, one to get cut-off axons from, and another
    # to actually test against (with/without padding):
    contrib = model.spatial.calc_axon_sensitivity(axons, pad=False)
    pad = engine == 'jax'
    axon_contrib = model.spatial.calc_axon_sensitivity(axons, pad=pad)
    # Check lambda math:
    max_axon_length = max([len(ax) for ax in contrib])
    for ax, xy, model_ax in zip(contrib, xyret, axon_contrib):
        axon = np.insert(ax, 0, list(xy) + [0], axis=0)
        d2 = np.cumsum(np.sqrt(np.diff(axon[:, 0], axis=0) ** 2 +
                               np.diff(axon[:, 1], axis=0) ** 2)) ** 2
        max_d2 = -2.0 * model.axlambda ** 2 * np.log(model.min_ax_sensitivity)
        idx_d2 = d2 < max_d2
        sensitivity = np.exp(-d2[idx_d2] / (2.0 * model.spatial.axlambda ** 2))
        # Axons need to be padded for jax:
        if engine == 'jax':
            s = np.zeros((max_axon_length))
            s[:len(sensitivity)] = sensitivity
            if len(sensitivity) > 0:
                s[len(sensitivity):] = sensitivity[-1]
            sensitivity = s.astype(np.float32)
        npt.assert_almost_equal(sensitivity, model_ax[:, 2])
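# A toy sketch of the "lambda math" verified above: sensitivity decays as
# exp(-d^2 / (2 * axlambda^2)), where d is the cumulative path length along
# the axon. All numbers below are illustrative, not library defaults.
import numpy as np

axlambda = 100.0  # decay constant in microns (illustrative)
# Segment endpoints of a toy axon path in retinal coordinates (microns):
axon = np.array([[0.0, 0.0], [50.0, 0.0], [150.0, 10.0]])
d2 = np.cumsum(np.sqrt(np.diff(axon[:, 0]) ** 2 +
                       np.diff(axon[:, 1]) ** 2)) ** 2
sensitivity = np.exp(-d2 / (2.0 * axlambda ** 2))
print(sensitivity)  # decreases with distance along the axon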
Creating the model
------------------

The first step is to instantiate the
:py:class:`~pulse2percept.models.AxonMapModel` class by calling its
constructor method.

The two most important parameters to set are ``rho`` and ``axlambda`` from
the equation above (here set to 200 micrometers and 500 micrometers,
respectively):

"""
# sphinx_gallery_thumbnail_number = 2

from pulse2percept.models import AxonMapModel

model = AxonMapModel(rho=200, axlambda=500)

##############################################################################
# Parameters you don't specify will take on default values. You can inspect
# all current model parameters as follows:

print(model)

##############################################################################
# This reveals a number of other parameters to set, such as:
#
# * ``xrange``, ``yrange``: the extent of the visual field to be simulated,
#   specified as a range of x and y coordinates (in degrees of visual angle,
#   or dva). For example, we are currently sampling x values between -20 dva
#   and +20 dva, and y values between -15 dva and +15 dva.
# * ``xystep``: The resolution (in dva) at which to sample the visual field.
hex_grid.plot()

##############################################################################
# The following example centers the grid on (x,y) = (-600um, 200um),
# z=150um away from the retinal surface, and rotates it clockwise by 45
# degrees (note the minus sign):

from numpy import pi

offset_grid = ElectrodeGrid((11, 13), 500, type='hex', x=-600, y=200, z=150,
                            rot=-pi / 4, etype=DiskElectrode, r=100)

##############################################################################
# .. note::
#
#     Clockwise/counter-clockwise rotations refer to rotations on the retinal
#     surface (that is, as if seen on a fundus photograph).
#
# We can also plot the grid on top of a map of retinal nerve fiber bundles:

from pulse2percept.models import AxonMapModel

AxonMapModel().plot()
offset_grid.plot()
Creating the model
------------------

The first step is to instantiate the
:py:class:`~pulse2percept.models.AxonMapModel` class by calling its
constructor method.

The two most important parameters to set are ``rho`` and ``axlambda`` from
the equation above (here set to 100 micrometers and 150 micrometers,
respectively):

"""
# sphinx_gallery_thumbnail_number = 2

from pulse2percept.models import AxonMapModel

model = AxonMapModel(rho=100, axlambda=150)

##############################################################################
# Parameters you don't specify will take on default values. You can inspect
# all current model parameters as follows:

print(model)

##############################################################################
# This reveals a number of other parameters to set, such as:
#
# * ``xrange``, ``yrange``: the extent of the visual field to be simulated,
#   specified as a range of x and y coordinates (in degrees of visual angle,
#   or dva). For example, we are currently sampling x values between -20 dva
#   and +20 dva, and y values between -15 dva and +15 dva.
# * ``xystep``: The resolution (in dva) at which to sample the visual field.
def test_AxonMapModel_predict_percept(engine):
    model = AxonMapModel(xystep=0.55, axlambda=100, rho=100, thresh_percept=0,
                         engine=engine, xrange=(-20, 20), yrange=(-15, 15),
                         n_axons=500)
    model.build()
    # Single-electrode stim:
    img_stim = np.zeros(60)
    img_stim[47] = 1
    # Axon map jax predict_percept not implemented yet:
    if engine == 'jax':
        with pytest.raises(NotImplementedError):
            percept = model.predict_percept(ArgusII(stim=img_stim))
        return
    percept = model.predict_percept(ArgusII(stim=img_stim))
    # Single bright pixel, rest of arc is less bright:
    npt.assert_equal(np.sum(percept.data > 0.8), 1)
    npt.assert_equal(np.sum(percept.data > 0.6), 2)
    npt.assert_equal(np.sum(percept.data > 0.1), 7)
    npt.assert_equal(np.sum(percept.data > 0.0001), 31)
    # Overall only a few bright pixels:
    npt.assert_almost_equal(np.sum(percept.data), 3.3087, decimal=3)
    # Brightest pixel is in lower right:
    npt.assert_almost_equal(percept.data[33, 46, 0], np.max(percept.data))
    # Top half is empty:
    npt.assert_almost_equal(np.sum(percept.data[:27, :, 0]), 0)
    # Same for lower band:
    npt.assert_almost_equal(np.sum(percept.data[39:, :, 0]), 0)
    # Full Argus II with small lambda: 60 bright spots
    model = AxonMapModel(engine='serial', xystep=1, rho=100, axlambda=40,
                         xrange=(-20, 20), yrange=(-15, 15), n_axons=500)
    model.build()
    percept = model.predict_percept(ArgusII(stim=np.ones(60)))
    # Most spots are pretty bright, but there are 2 dimmer ones (due to their
    # location on the retina):
    npt.assert_equal(np.sum(percept.data > 0.5), 28)
    npt.assert_equal(np.sum(percept.data > 0.275), 56)
    # Model gives same outcome as Spatial:
    spatial = AxonMapSpatial(engine='serial', xystep=1, rho=100, axlambda=40,
                             xrange=(-20, 20), yrange=(-15, 15), n_axons=500)
    spatial.build()
    spatial_percept = spatial.predict_percept(ArgusII(stim=np.ones(60)))
    npt.assert_almost_equal(percept.data, spatial_percept.data)
    npt.assert_equal(percept.time, None)
    # Warning for nonzero electrode-retina distances:
    implant = ArgusI(stim=np.ones(16), z=10)
    msg = ("Nonzero electrode-retina distances do not have any effect on the "
           "model output.")
    assert_warns_msg(UserWarning, model.predict_percept, msg, implant)
###############################################################################
# Great! We have just reproduced a panel from Figure 2 in [Beyeler2019]_.
#
# As [Beyeler2019]_ went on to show, the orientation of these phosphenes is
# well aligned with the map of nerve fiber bundles (NFBs) in each subject's
# eye.
#
# To see how the phosphene drawings line up with the NFBs, we can also pass an
# :py:class:`~pulse2percept.models.AxonMapModel` to the function.
# Of course, we need to make sure that we use the correct dimensions. Subject
# S2 had their optic disc center located 16.2 deg nasally, 1.38 deg superior
# from the fovea:

from pulse2percept.models import AxonMapModel

model = AxonMapModel(loc_od=(16.2, 1.38))
plot_argus_phosphenes(data, argus, axon_map=model)

###############################################################################
# Predicting phosphene shape
# --------------------------
#
# In addition, the :py:class:`~pulse2percept.models.AxonMapModel` is well
# suited to predict the shape of individual phosphenes. Using the values given
# in [Beyeler2019]_, we can tailor the axon map parameters to Subject 2:

import numpy as np

model = AxonMapModel(rho=315, axlambda=500, loc_od=(16.2, 1.38),
                     xrange=(-30, 30),
.. _NCT00279500: https://clinicaltrials.gov/ct2/show/NCT00279500
.. _NCT00407602: https://www.clinicaltrials.gov/ct2/show/NCT00407602

"""
# sphinx_gallery_thumbnail_number = 2

import matplotlib.pyplot as plt
from pulse2percept.implants import *
from pulse2percept.models import AxonMapModel

fig, ax = plt.subplots(ncols=2, figsize=(10, 6))

# For illustrative purposes, also show the map of fiber
# bundles in the optic fiber layer:
model = AxonMapModel()
model.plot(ax=ax[0])
# Argus I is typically implanted at a 30-45 deg angle:
ArgusI(rot=-0.52).plot(ax=ax[0], annotate=True)
ax[0].set_title('Argus I')

model.plot(ax=ax[1])
# Argus II is typically implanted at a 30-45 deg angle:
ArgusII(rot=-0.52).plot(ax=ax[1], annotate=False)
ax[1].set_title('Argus II')

###############################################################################
# PRIMA Bionic Vision System (Pixium Vision SA)
# ---------------------------------------------
#
# :py:class:`~pulse2percept.implants.PRIMA` is a subretinal device developed
###############################################################################
# Great! We have just reproduced a panel from Figure 2 in [Beyeler2019]_.
#
# As [Beyeler2019]_ went on to show, the orientation of these phosphenes is
# well aligned with the map of nerve fiber bundles (NFBs) in each subject's
# eye.
#
# To see how the phosphene drawings line up with the NFBs, we can also pass an
# :py:class:`~pulse2percept.models.AxonMapModel` to the function.
# Of course, we need to make sure that we use the correct dimensions. Subject
# S2 had their optic disc center located 14 deg nasally, 2.4 deg superior from
# the fovea:

from pulse2percept.models import AxonMapModel

model = AxonMapModel(loc_od=(14, 2.4))
plot_argus_phosphenes(data, argus, axon_map=model)

###############################################################################
# Analyzing phosphene shape
# -------------------------
#
# The phosphene drawings also come annotated with different shape descriptors:
# area, orientation, and elongation.
# Elongation is also called eccentricity in the computer vision literature,
# which is not to be confused with retinal eccentricity. It is simply a number
# between 0 and 1, where 0 corresponds to a circle and 1 corresponds to an
# infinitesimally thin line (note that the Methods section of [Beyeler2019]_
# got it wrong).
#
# [Beyeler2019]_ made the point that if each phosphene could be considered a
------------------

The first step is to instantiate the
:py:class:`~pulse2percept.models.AxonMapModel` class by calling its
constructor method.

The two most important parameters to set are ``rho`` and ``axlambda`` from
the equation above (here set to 150 micrometers and 500 micrometers,
respectively):

"""
# sphinx_gallery_thumbnail_number = 2

import numpy as np
from pulse2percept.implants import ArgusII
from pulse2percept.models import AxonMapModel

model = AxonMapModel(rho=150, axlambda=500)

##############################################################################
# Parameters you don't specify will take on default values. You can inspect
# all current model parameters as follows:

print(model)

##############################################################################
# This reveals a number of other parameters to set, such as:
#
# * ``xrange``, ``yrange``: the extent of the visual field to be simulated,
#   specified as a range of x and y coordinates (in degrees of visual angle,
#   or dva). For example, we are currently sampling x values between -20 dva
#   and +20 dva, and y values between -15 dva and +15 dva.
# * ``xystep``: The resolution (in dva) at which to sample the visual field
#   (a short sketch of overriding these follows below).
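##############################################################################
# As a hedged sketch of overriding the sampling grid in the constructor (the
# values below are purely illustrative, not recommendations):

model_small = AxonMapModel(rho=150, axlambda=500,
                           xrange=(-10, 10), yrange=(-8, 8), xystep=0.5)
print(model_small)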