Example #1
class MixedPrecisionPCMPreset(DigitalRankUpdateRPUConfig):
    """Configuration using Mixed-precision with
    class:`~aihwkit.simulator.presets.devices.PCMPresetDevice`.

    See class:`~aihwkit.simulator.configs.devices.MixedPrecisionCompound`
    for details on the mixed precision optimizer.

    The default peripheral hardware
    (:class:`~aihwkit.simulator.presets.utils.PresetIOParameters`) and
    analog update
    (:class:`~aihwkit.simulator.presets.utils.PresetUpdateParameters`)
    configuration is used otherwise.
    """

    device: DigitalRankUpdateCell = field(
        default_factory=lambda: MixedPrecisionCompound(device=PCMPresetUnitCell())
    )
    forward: IOParameters = field(default_factory=PresetIOParameters)
    backward: IOParameters = field(default_factory=PresetIOParameters)
    update: UpdateParameters = field(default_factory=PresetUpdateParameters)
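A minimal usage sketch for this preset. It assumes the class is importable from aihwkit.simulator.presets.configs (the module the cross-references above point into) and that aihwkit's AnalogLinear layer is available; treat the import paths as assumptions rather than the library's documented API.

from torch import Tensor

from aihwkit.nn import AnalogLinear
from aihwkit.simulator.presets.configs import MixedPrecisionPCMPreset

# Build an analog layer directly from the preset and run a forward pass.
model = AnalogLinear(4, 2, bias=True, rpu_config=MixedPrecisionPCMPreset())
print(model(Tensor([[0.1, 0.2, 0.4, 0.3]])))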
Example #2
def get_rpu_config(self):
    return DigitalRankUpdateRPUConfig(
        device=MixedPrecisionCompound(
            device=SoftBoundsDevice(w_max_dtod=0, w_min_dtod=0),
            transfer_every=1))
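A small consumption sketch for this config, assuming aihwkit's AnalogLinear layer and the import paths used by the other examples on this page. Roughly, transfer_every=1 transfers the digitally accumulated rank update to the analog weights after every update, and the zero w_max_dtod/w_min_dtod values switch off device-to-device variation of the soft bounds.

from aihwkit.nn import AnalogLinear
from aihwkit.simulator.configs import DigitalRankUpdateRPUConfig
from aihwkit.simulator.configs.devices import (MixedPrecisionCompound,
                                               SoftBoundsDevice)

rpu_config = DigitalRankUpdateRPUConfig(
    device=MixedPrecisionCompound(
        device=SoftBoundsDevice(w_max_dtod=0, w_min_dtod=0),
        transfer_every=1))

# Attach the config to an analog layer and inspect the resulting C++ tile.
layer = AnalogLinear(4, 2, bias=True, rpu_config=rpu_config)
print(layer.analog_tile.tile)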
Example #3
from torch import Tensor

from aihwkit.nn import AnalogLinear
from aihwkit.simulator.configs import DigitalRankUpdateRPUConfig
from aihwkit.simulator.configs.devices import (MixedPrecisionCompound,
                                               SoftBoundsDevice)
from aihwkit.simulator.rpu_base import cuda

# Prepare the datasets (input and expected output).
x = Tensor([[0.1, 0.2, 0.4, 0.3], [0.2, 0.1, 0.1, 0.3]])
y = Tensor([[1.0, 0.5], [0.7, 0.3]])

# Select the device model to use in the training. While one could use a
# preset as well, here we build up the RPU config from more basic
# devices. We use the RPU config for a digital rank update and transfer
# to an analog device (as in mixed precision), and set it to a mixed
# precision compound which in turn uses a SoftBounds analog device:
rpu_config = DigitalRankUpdateRPUConfig(
    device=MixedPrecisionCompound(device=SoftBoundsDevice()))

# print the config (default values are omitted)
print('\nPretty-print of non-default settings:\n')
print(rpu_config)

print('\nInfo about all settings:\n')
print(repr(rpu_config))

model = AnalogLinear(4, 2, bias=True, rpu_config=rpu_config)

# A more detailed printout of the instantiated C++ tile.
print('\nInfo about the instantiated C++ tile:\n')
print(model.analog_tile.tile)

# Move the model and tensors to cuda if it is available.
if cuda.is_compiled():
    x = x.cuda()
    y = y.cuda()
    model = model.cuda()
Example #4
from torch import Tensor

from aihwkit.nn import AnalogLinear
from aihwkit.simulator.configs import DigitalRankUpdateRPUConfig
from aihwkit.simulator.configs.devices import (MixedPrecisionCompound,
                                               SoftBoundsDevice)
from aihwkit.simulator.rpu_base import cuda

# Prepare the datasets (input and expected output).
x = Tensor([[0.1, 0.2, 0.4, 0.3], [0.2, 0.1, 0.1, 0.3]])
y = Tensor([[1.0, 0.5], [0.7, 0.3]])

# Select the device model to use in the training. While one could use a
# preset as well, here we build up the RPU config from more basic
# devices. We use the RPU config for a digital rank update and transfer
# to an analog device (as in mixed precision), and set it to a mixed
# precision compound which in turn uses a SoftBounds analog device:
rpu_config = DigitalRankUpdateRPUConfig(device=MixedPrecisionCompound(
    device=SoftBoundsDevice(),
    # adjust quantization level (0 means FP)
    n_x_bins=5,  # quantization bins of the digital rank update (activation)
    n_d_bins=3  # quantization bins of the digital rank update (error)
))

# print the config (default values are omitted)
print('\nPretty-print of non-default settings:\n')
print(rpu_config)

print('\nInfo about all settings:\n')
print(repr(rpu_config))

model = AnalogLinear(4, 2, bias=True, rpu_config=rpu_config)

# A more detailed printout of the instantiated C++ tile.
print('\nInfo about the instantiated C++ tile:\n')
print(model.analog_tile.tile)
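For completeness, a hedged sketch of the training step that the example stops short of. AnalogSGD, mse_loss and the regroup_param_groups call follow the library's simple-layer example scripts and are assumptions here, not part of the snippet above.

from torch.nn.functional import mse_loss
from aihwkit.optim import AnalogSGD

opt = AnalogSGD(model.parameters(), lr=0.1)
opt.regroup_param_groups(model)

for epoch in range(100):
    # The (quantized) rank update is accumulated digitally during
    # backward() and transferred to the analog device by opt.step().
    opt.zero_grad()
    loss = mse_loss(model(x), y)
    loss.backward()
    opt.step()

print('Final loss: {:.6f}'.format(loss.item()))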