from numbers import Real

import hxtorch

# `log` is assumed to be a module-level logger defined alongside this
# function, e.g. obtained via hxtorch.logger.get(...).


def init(calibration_path: str, mock: bool,
         mock_noise_std: Real, mock_gain: Real):
    """
    Initialize the hxtorch connection and load the calibration.

    Caveat: This also measures and sets the gain, therefore do this before
    initializing the model (as this influences the layer initialization).

    :param calibration_path: Path of a custom calibration
    :param mock: Whether to simulate the hardware
    :param mock_noise_std: Standard deviation of artificial noise in mock mode
    :param mock_gain: Multiplication gain used in mock mode
    """
    if mock:
        mock_parameter = hxtorch.MockParameter(
            noise_std=mock_noise_std, gain=mock_gain)
        log.info(f"Initialize mock mode with {mock_parameter}")
    else:
        log.info("Initialize with BrainScaleS-2 ASIC")
        if calibration_path:
            log.info(f"Apply calibration from: '{calibration_path}'")
            hxtorch.init_hardware(
                hxtorch.CalibrationPath(calibration_path))
        else:
            log.info("Apply latest nightly calibration")
            hxtorch.init_hardware()  # defaults to a nightly default calib
        mock_parameter = hxtorch.measure_mock_parameter()

    # set the mock parameter used in mock mode and in the backward pass
    hxtorch.set_mock_parameter(mock_parameter)
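A minimal usage sketch of the function above: the wrapper name `run_experiment` and the mock parameter values are chosen here purely for illustration; only `init` and `hxtorch.release_hardware` are taken from the snippets in this section.

def run_experiment(calibration_path: str = "", mock: bool = True) -> None:
    # call init() before constructing the model, since the measured (or
    # assumed) gain influences the layer initialization (see caveat above);
    # the noise/gain values below are placeholders, not calibrated numbers
    init(calibration_path, mock=mock, mock_noise_std=2., mock_gain=0.002)

    # ... build and train the model here ...

    if not mock:
        # only the hardware path opened a connection that needs releasing
        hxtorch.release_hardware()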
import torch

import hxtorch


def main():
    # initialize the connection, uses the default nightly calibration for the setup
    hxtorch.init_hardware()

    x = torch.full((128,), 10.)      # input range: 0...31
    w = torch.full((128, 256), 20.)  # weight range: -63...63
    # this weight uses the whole upper half of the chip

    out = hxtorch.matmul(
        x, w,  # noqa: E121, the same as in `torch.matmul`
        # number of subsequent sends of the input in the same integration step
        num_sends=1,
        # wait between sending the individual vector entries (in FPGA cycles)
        wait_between_events=5)
    # output range: -128...127

    log = hxtorch.logger.get("hxtorch.examples.minimal")
    log.info(f"Input (mean): {x.mean()}, "
             f"weight (mean): {w.mean()}, "
             f"output (mean): {out.mean()}")

    hxtorch.release_hardware()
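To run the example above as a stand-alone script, a standard entry-point guard can be appended; this is a sketch and assumes the module-level imports shown above.

# hypothetical script entry point for the example above
if __name__ == "__main__":
    main()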
def setUpClass(cls):
    hxtorch.init_hardware()
    mock_parameter = hxtorch.measure_mock_parameter()
    hxtorch.set_mock_parameter(mock_parameter)
    cls.noise_std = mock_parameter.noise_std
    cls.gain = mock_parameter.gain
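A matching teardown is sketched below; `tearDownClass` is the standard unittest counterpart to `setUpClass`, and `hxtorch.release_hardware()` is the release call used in the other snippets. It is not part of the original test class shown here.

def tearDownClass(cls):
    # release the hardware connection opened in setUpClass
    hxtorch.release_hardware()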
def setUpClass(cls):
    hxtorch.init_hardware()
def test_measure(self):
    hxtorch.init_hardware()
    mock_parameter = hxtorch.measure_mock_parameter()
    hxtorch.release_hardware()

    self.assertGreater(mock_parameter.gain, 0)
    self.assertLessEqual(mock_parameter.gain, 1)
def setUpClass(cls) -> None:
    hxtorch.init_hardware()