def plot_device(device: Union[PulsedDevice, UnitCell], w_noise: float = 0.0, **kwargs: Any) -> None:
    """Plot the step response figure for a given device (preset).

    Note:
        It will use an amount of read weight noise ``w_noise`` for
        reading the weights.

    Args:
        device: PulsedDevice parameters
        w_noise: Weight noise standard deviation during read
        kwargs: for other parameters, see :func:`plot_response_overview`
    """
    plt.figure(figsize=[7, 7])

    # Forward/read settings: everything is ideal except the requested
    # weight read noise, so the plot reflects only device behavior.
    io_pars = IOParameters(
        out_noise=0.0,       # no output noise
        w_noise=w_noise,     # simulated weight read noise
        inp_res=-1.,         # turn off DAC
        out_bound=100.,      # not limiting
        out_res=-1.,         # turn off ADC
        bound_management=BoundManagementType.NONE,
        noise_management=NoiseManagementType.NONE,
        w_noise_type=WeightNoiseType.ADDITIVE_CONSTANT)

    # Single pulsed devices and unit cells need different config containers.
    if isinstance(device, PulsedDevice):
        config = SingleRPUConfig(device=device, forward=io_pars)
    else:
        config = UnitCellRPUConfig(device=device, forward=io_pars)
    plot_response_overview(config, **kwargs)
def get_rpu_config(self):
    """Return a transfer-compound config with two ideal soft-bounds devices."""
    # Two identical soft-bounds devices without device-to-device variation.
    paired_devices = [
        SoftBoundsDevice(w_max_dtod=0, w_min_dtod=0),
        SoftBoundsDevice(w_max_dtod=0, w_min_dtod=0),
    ]
    compound = TransferCompoundDevice(
        unit_cell_devices=paired_devices,
        transfer_forward=IOParameters(is_perfect=True))  # ideal transfer read
    return UnitCellRPUConfig(device=compound)
def get_rpu_config(self):
    """Return a one-sided unit cell config with one constant-step device."""
    # No device-to-device variation on the weight bounds.
    step_device = ConstantStepDevice(w_max_dtod=0, w_min_dtod=0)
    return UnitCellRPUConfig(
        device=OneSidedUnitCell(unit_cell_devices=[step_device]))
def get_rpu_config(self):
    """Return a reference unit cell config with two soft-bounds devices."""
    # Two distinct instances without device-to-device variation.
    cell_devices = [SoftBoundsDevice(w_max_dtod=0, w_min_dtod=0)
                    for _ in range(2)]
    return UnitCellRPUConfig(
        device=ReferenceUnitCell(unit_cell_devices=cell_devices))
from aihwkit.simulator.rpu_base import cuda

# Prepare the datasets (input and expected output).
x = Tensor([[0.1, 0.2, 0.4, 0.3], [0.2, 0.1, 0.1, 0.3]])
y = Tensor([[1.0, 0.5], [0.7, 0.3]])

# The Tiki-taka learning rule is realized with a transfer compound.
tiki_taka_device = TransferCompound(
    # Devices that compose the Tiki-taka compound.
    unit_cell_devices=[
        SoftBoundsDevice(w_min=-0.3, w_max=0.3),
        SoftBoundsDevice(w_min=-0.6, w_max=0.6),
    ],
    # Make some adjustments of the way Tiki-Taka is performed.
    units_in_mbatch=True,       # batch_size=1 anyway
    transfer_every=2,           # every 2 batches do a transfer-read
    n_cols_per_transfer=1,      # one forward read for each transfer
    gamma=0.0,                  # all SGD weight in second device
    scale_transfer_lr=True,     # in relative terms to SGD LR
    transfer_lr=1.0,            # same transfer LR as for SGD
)
rpu_config = UnitCellRPUConfig(device=tiki_taka_device)

# Make more adjustments (can be made here or above).
rpu_config.forward.inp_res = 1 / 64.  # 6 bit DAC

# Use the same settings for the backward pass as for the forward pass.
rpu_config.backward = rpu_config.forward
def get_rpu_config(device: Union[PulsedDevice, UnitCell], io_pars: IOParameters) \
        -> Union[SingleRPUConfig, UnitCellRPUConfig]:
    """Wrap a device and forward IO settings into the matching RPU config.

    Plain pulsed devices go into a ``SingleRPUConfig``; anything else
    (compound unit cells) into a ``UnitCellRPUConfig``. Both containers
    accept the same ``device``/``forward`` keywords.
    """
    config_cls = (SingleRPUConfig if isinstance(device, PulsedDevice)
                  else UnitCellRPUConfig)
    return config_cls(device=device, forward=io_pars)
def get_rpu_config(self):
    """Return a difference unit cell config with one constant-step device."""
    # No device-to-device variation on the weight bounds.
    base_device = ConstantStepDevice(w_max_dtod=0, w_min_dtod=0)
    return UnitCellRPUConfig(
        device=DifferenceUnitCellDevice(unit_cell_devices=[base_device]))
from aihwkit.simulator.configs import UnitCellRPUConfig
from aihwkit.simulator.configs.utils import VectorUnitCellUpdatePolicy
from aihwkit.simulator.configs.devices import (
    ConstantStepDevice,
    VectorUnitCell
)
from aihwkit.simulator.rpu_base import cuda

# Prepare the datasets (input and expected output).
x = Tensor([[0.1, 0.2, 0.4, 0.3], [0.2, 0.1, 0.1, 0.3]])
y = Tensor([[1.0, 0.5], [0.7, 0.3]])

# Single-layer network on a vector unit cell: three arbitrarily configured
# single devices (of the same type) share each cross-point.
rpu_config = UnitCellRPUConfig(
    device=VectorUnitCell(
        unit_cell_devices=[
            ConstantStepDevice(w_max=0.3),
            ConstantStepDevice(w_max_dtod=0.4),
            ConstantStepDevice(up_down_dtod=0.1),
        ]))

# Only one (randomly selected) device receives each update; the effective
# weight is the sum of all device weights.
rpu_config.device.update_policy = VectorUnitCellUpdatePolicy.SINGLE_RANDOM

model = AnalogLinear(4, 2, bias=True, rpu_config=rpu_config)
from aihwkit.simulator.configs.devices import (
    ConstantStepDevice,
    VectorUnitCell,
    LinearStepDevice,
    SoftBoundsDevice,
    ReferenceUnitCell)
from aihwkit.simulator.rpu_base import cuda

# Prepare the datasets (input and expected output).
x = Tensor([[0.1, 0.2, 0.4, 0.3], [0.2, 0.1, 0.1, 0.3]])
y = Tensor([[1.0, 0.5], [0.7, 0.3]])

# Single-layer network on a vector unit cell: several arbitrarily defined
# devices (including a nested reference cell) share each cross-point.
rpu_config = UnitCellRPUConfig()
rpu_config.device = VectorUnitCell(
    unit_cell_devices=[
        ReferenceUnitCell(unit_cell_devices=[SoftBoundsDevice(w_max=1.0)]),
        ConstantStepDevice(),
        LinearStepDevice(w_max_dtod=0.4),
        SoftBoundsDevice(),
    ])

# Only one (randomly selected) device receives each update; the effective
# weight is the sum of all device weights.
rpu_config.device.update_policy = VectorUnitCellUpdatePolicy.SINGLE_RANDOM
from aihwkit.optim.analog_sgd import AnalogSGD
from aihwkit.simulator.configs import UnitCellRPUConfig
from aihwkit.simulator.configs.devices import (
    ConstantStepDevice,
    VectorUnitCellDevice,
    LinearStepDevice,
    SoftBoundsDevice)
from aihwkit.simulator.rpu_base import cuda

# Prepare the datasets (input and expected output).
x = Tensor([[0.1, 0.2, 0.4, 0.3], [0.2, 0.1, 0.1, 0.3]])
y = Tensor([[1.0, 0.5], [0.7, 0.3]])

# Single-layer network on a vector device: three arbitrarily defined
# devices share each cross-point.
rpu_config = UnitCellRPUConfig()
rpu_config.device = VectorUnitCellDevice(unit_cell_devices=[
    ConstantStepDevice(),
    LinearStepDevice(w_max_dtod=0.4),
    SoftBoundsDevice(),
])

# Only one device receives each update, selected at random; the effective
# weight is the sum of all device weights.
rpu_config.device.single_device_update = True
rpu_config.device.single_device_update_random = True

model = AnalogLinear(4, 2, bias=True, rpu_config=rpu_config)