Example #1
0
def convert_from_keras_model(model, backend, output_dir='my-hls-test', project_name='myproject',
    fpga_part='xcku115-flvb2104-2-i', clock_period=5, hls_config=None):
    """Convert a Keras model to an hls4ml model based on the provided configuration.

    Args:
        model: Keras model to convert.
        backend: Backend object; its ``name`` is stored in the config and its
            ``get_pstring`` supplies the default precision string.
        output_dir (str, optional): Output directory of the generated HLS
            project. Defaults to 'my-hls-test'.
        project_name (str, optional): Name of the HLS project.
            Defaults to 'myproject'.
        fpga_part (str, optional): The target FPGA device.
            Defaults to 'xcku115-flvb2104-2-i'.
        clock_period (int, optional): Clock period of the design. Defaults to 5.
        hls_config (dict, optional): The HLS config. Defaults to an empty dict.

    Raises:
        Exception: If a 'Model' section is provided in `hls_config` but lacks
            'Precision' or 'ReuseFactor'.

    Returns:
        The hls4ml model produced by `keras_to_hls`.
    """
    # Use None as the default instead of a mutable {} literal: a dict default
    # would be a single object shared across every call of this function.
    if hls_config is None:
        hls_config = {}

    # NOTE(review): assumes create_config initializes config['HLSConfig'] —
    # confirm, since the sibling variant of this function creates it explicitly.
    config = create_config(output_dir=output_dir,
        project_name=project_name, backend=backend.name, fpga_part=fpga_part, clock_period=clock_period)
    config['KerasModel'] = model

    model_config = hls_config.get('Model', None)
    if model_config is not None:
        # A user-supplied model config must be complete.
        if not all(k in model_config for k in ('Precision', 'ReuseFactor')):
            raise Exception('Precision and ReuseFactor must be provided in the hls_config')
    else:
        # Backend defaults: 16 total bits, 6 integer bits, reuse factor 1.
        model_config = {}
        model_config['Precision'] = backend.get_pstring(16,6)
        model_config['ReuseFactor'] = '1'
    config['HLSConfig']['Model'] = model_config

    # Optional sections are copied through only when the caller supplied them.
    if 'LayerName' in hls_config:
        config['HLSConfig']['LayerName'] = hls_config['LayerName']

    if 'LayerType' in hls_config:
        config['HLSConfig']['LayerType'] = hls_config['LayerType']

    if 'Optimizers' in hls_config:
        config['HLSConfig']['Optimizers'] = hls_config['Optimizers']

    return keras_to_hls(config)
Example #2
0
def convert_from_keras_model(model,
                             output_dir='my-hls-test',
                             project_name='myproject',
                             input_data_tb=None,
                             output_data_tb=None,
                             backend='Vivado',
                             hls_config=None,
                             **kwargs):
    """Convert to hls4ml model based on the provided configuration.

    Args:
        model: Keras model to convert
        output_dir (str, optional): Output directory of the generated HLS
            project. Defaults to 'my-hls-test'.
        project_name (str, optional): Name of the HLS project.
            Defaults to 'myproject'.
        input_data_tb (str, optional): String representing the path of input data in .npy or .dat format that will be
            used during csim and cosim.
        output_data_tb (str, optional): String representing the path of output data in .npy or .dat format that will be
            used during csim and cosim.
        backend (str, optional): Name of the backend to use, e.g., 'Vivado'
            or 'Quartus'.
        board (str, optional): One of target boards specified in `supported_board.json` file. If set to `None` a default
            device of a backend will be used. See documentation of the backend used.
        part (str, optional): The FPGA part. If set to `None` a default part of a backend will be used.
            See documentation of the backend used. Note that if `board` is specified, the part associated to that board
            will overwrite any part passed as a parameter.
        clock_period (int, optional): Clock period of the design.
            Defaults to 5.
        io_type (str, optional): Type of implementation used. One of
            'io_parallel' or 'io_serial'. Defaults to 'io_parallel'.
        hls_config (dict, optional): The HLS config.
        **kwargs (dict, optional): Additional parameters that will be used to create the config of the specified backend

    Raises:
        Exception: If precision and reuse factor are not present in 'hls_config'

    Returns:
        ModelGraph: hls4ml model.
    """
    # None (not {}) as the default: a mutable default dict would be a single
    # object shared across every call of this function.
    if hls_config is None:
        hls_config = {}

    config = create_config(output_dir=output_dir,
                           project_name=project_name,
                           backend=backend,
                           **kwargs)

    config['KerasModel'] = model
    config['InputData'] = input_data_tb
    config['OutputPredictions'] = output_data_tb
    config['HLSConfig'] = {}

    # Validate/normalize the 'Model' section, then check and copy the
    # remaining sections of hls_config into the new config.
    model_config = hls_config.get('Model', None)
    config['HLSConfig']['Model'] = _check_model_config(model_config)

    _check_hls_config(config, hls_config)

    return keras_to_hls(config)
Example #3
0
def hls_model(settings):
  """Build and compile the QKeras MNIST CNN hls4ml model for a test run.

  Args:
      settings: A 2-element sequence (io_type, strategy).

  Returns:
      The compiled model produced by `keras_to_hls`.
  """
  io_type = settings[0]
  strategy = settings[1]
  # Use a context manager so the config file handle is closed deterministically;
  # the original open(...).read() leaked the handle until garbage collection.
  with open('../../example-models/config-files/qkeras_mnist_cnn_config.yml') as cfg_file:
    config = yaml.safe_load(cfg_file.read())
  config['KerasJson'] = '../../example-models/keras/qkeras_mnist_cnn.json'
  config['KerasH5'] = '../../example-models/keras/qkeras_mnist_cnn_weights.h5'
  # Distinct output dir per parameter combination so runs don't clobber each other.
  config['OutputDir'] = 'hls4mlprj_cnn_mnist_{}_{}'.format(io_type, strategy)
  config['IOType'] = io_type
  config['HLSConfig']['Model']['Strategy'] = strategy
  config['HLSConfig']['LayerName']['softmax']['Strategy'] = 'Stable'
  hls_model = keras_to_hls(config)
  hls_model.compile()
  return hls_model
Example #4
0
def hls_model(settings):
  """Build and compile the QKeras MNIST CNN hls4ml model for a test run.

  Args:
      settings: A 2-element sequence (io_type, strategy).

  Returns:
      The compiled model produced by `keras_to_hls`.
  """
  io_type = settings[0]
  strategy = settings[1]
  config_path = example_model_path / 'config-files/qkeras_mnist_cnn_config.yml'
  with config_path.open('r') as cfg_file:
    config = yaml.safe_load(cfg_file.read())
  # Point the config at the example model files and a per-parameter output dir.
  config['KerasJson'] = str(example_model_path / 'keras/qkeras_mnist_cnn.json')
  config['KerasH5'] = str(example_model_path / 'keras/qkeras_mnist_cnn_weights.h5')
  config['OutputDir'] = str(test_root_path / 'hls4mlprj_cnn_mnist_{}_{}'.format(io_type, strategy))
  config['IOType'] = io_type
  config['HLSConfig']['Model']['Strategy'] = strategy
  config['HLSConfig']['LayerName']['softmax']['Strategy'] = 'Stable'
  model = keras_to_hls(config)
  model.compile()
  return model
Example #5
0
def convert_from_yaml_config(yamlConfig):
    """Dispatch a parsed YAML config to the matching frontend converter.

    The first matching model key wins; Keras is the fallback when no
    other frontend key is present.
    """
    # (config key, frontend available?, converter, package name for the error)
    frontends = [
        ('OnnxModel', __onnx_enabled__, onnx_to_hls, 'ONNX'),
        ('PytorchModel', __pytorch_enabled__, pytorch_to_hls, 'PyTorch'),
        ('TensorFlowModel', __tensorflow_enabled__, tf_to_hls, 'TensorFlow'),
    ]
    for key, enabled, converter, package in frontends:
        if key in yamlConfig:
            if not enabled:
                raise Exception("{0} not found. Please install {0}.".format(package))
            return converter(yamlConfig)
    # No frontend-specific key: assume a Keras model description.
    return keras_to_hls(yamlConfig)
Example #6
0
def hls_model(settings):
    """Build and compile an hls4ml model for the KERAS_conv1d example.

    Args:
        settings: A 2-element sequence (io_type, strategy).

    Returns:
        The compiled model produced by `keras_to_hls`.
    """
    io_type = settings[0]
    strategy = settings[1]
    config = hls4ml.converters.create_config(
        output_dir='hls4mlprj_conv1d_{}_{}'.format(io_type, strategy))
    config['KerasJson'] = str(example_model_path / 'keras/KERAS_conv1d.json')
    config['KerasH5'] = str(example_model_path /
                            'keras/KERAS_conv1d_weights.h5')
    # OutputDir from create_config is overwritten with a path under the test root.
    config['OutputDir'] = str(
        test_root_path / 'hls4mlprj_conv1d_{}_{}'.format(io_type, strategy))
    config['IOType'] = io_type

    hls_config = {
        'Model': {
            'Strategy': strategy,
            'ReuseFactor': 1,
            'Precision': 'ap_fixed<16,3,AP_RND_CONV,AP_SAT>'
        }
    }
    # Some model specific precision tuning
    # NOTE(review): 'LayerName' is set on the top-level config here, while
    # similar fixtures in this file nest it under config['HLSConfig']
    # ['LayerName'] — confirm keras_to_hls reads it from the top level,
    # otherwise these per-layer settings are silently ignored.
    config['LayerName'] = {}
    config['LayerName']['fc1_relu'] = {
        'Precision': {
            'weight': 'ap_fixed<16,3>',
            'result': 'ap_fixed<16,6,AP_RND_CONV,AP_SAT>'
        }
    }
    config['LayerName']['output_softmax'] = {
        'Precision': {
            'weight': 'ap_fixed<16,6>',
            'result': 'ap_fixed<16,6,AP_RND_CONV,AP_SAT>'
        }
    }
    config['LayerName']['output_softmax_softmax'] = {'Strategy': 'Stable'}
    config['HLSConfig'] = hls_config
    hls_model = keras_to_hls(config)
    hls_model.compile()
    return hls_model
Example #7
0
def convert_from_config(config):
    """Convert to hls4ml model based on the provided configuration.

    Arguments:
        config: A string containing the path to the YAML configuration file on
            the filesystem or a dict containing the parsed configuration.

    Returns:
        HLSModel: hls4ml model.
    """
    # A string is treated as a path to a YAML file; anything else is assumed
    # to already be a parsed configuration mapping.
    yamlConfig = parse_yaml_config(config) if isinstance(config, str) else config

    # Guard clauses: pick the frontend from the first matching model key,
    # failing fast when the required package is not installed.
    if 'OnnxModel' in yamlConfig:
        if not __onnx_enabled__:
            raise Exception("ONNX not found. Please install ONNX.")
        return onnx_to_hls(yamlConfig)

    if 'PytorchModel' in yamlConfig:
        if not __pytorch_enabled__:
            raise Exception("PyTorch not found. Please install PyTorch.")
        return pytorch_to_hls(yamlConfig)

    if 'TensorFlowModel' in yamlConfig:
        if not __tensorflow_enabled__:
            raise Exception("TensorFlow not found. Please install TensorFlow.")
        return tf_to_hls(yamlConfig)

    # Default frontend: Keras.
    return keras_to_hls(yamlConfig)
Example #8
0
def convert_from_keras_model(model,
                             output_dir='my-hls-test',
                             project_name='myproject',
                             fpga_part='xcku115-flvb2104-2-i',
                             clock_period=5,
                             io_type='io_parallel',
                             hls_config=None):
    """Convert to hls4ml model based on the provided configuration.

    Args:
        model: Keras model to convert
        output_dir (str, optional): Output directory of the generated HLS
            project. Defaults to 'my-hls-test'.
        project_name (str, optional): Name of the HLS project.
            Defaults to 'myproject'.
        fpga_part (str, optional): The target FPGA device.
            Defaults to 'xcku115-flvb2104-2-i'.
        clock_period (int, optional): Clock period of the design.
            Defaults to 5.
        io_type (str, optional): Type of implementation used. One of
            'io_parallel' or 'io_serial'. Defaults to 'io_parallel'.
        hls_config (dict, optional): The HLS config.

    Raises:
        Exception: If precision and reuse factor are not present in 'hls_config'

    Returns:
        HLSModel: hls4ml model.
    """
    # None (not {}) as the default: a mutable default dict would be a single
    # object shared across every call of this function.
    if hls_config is None:
        hls_config = {}

    config = create_vivado_config(output_dir=output_dir,
                                  project_name=project_name,
                                  fpga_part=fpga_part,
                                  clock_period=clock_period,
                                  io_type=io_type)
    config['KerasModel'] = model

    model_config = hls_config.get('Model', None)
    if model_config is not None:
        # A user-provided model config must be complete.
        if not all(k in model_config for k in ('Precision', 'ReuseFactor')):
            raise Exception(
                'Precision and ReuseFactor must be provided in the hls_config')
    else:
        # Defaults: 16-bit fixed point with 6 integer bits, reuse factor 1.
        model_config = {}
        model_config['Precision'] = 'ap_fixed<16,6>'
        model_config['ReuseFactor'] = '1'
    config['HLSConfig']['Model'] = model_config

    # Copy the optional sections through only when the caller supplied them.
    for section in ('LayerName', 'LayerType', 'Optimizers', 'SkipOptimizers'):
        if section in hls_config:
            config['HLSConfig'][section] = hls_config[section]

    return keras_to_hls(config)