Example #1
def model_from_config(config, custom_objects={}):
    '''Instantiates a model from its configuration dictionary
    and returns a model instance.
    '''
    from keras.utils.layer_utils import layer_from_config
    if isinstance(config, list):
        raise Exception(
            '`model_from_config` expects a dictionary, not a list. '
            'Maybe you meant to use `Sequential.from_config(config)`?')
    return layer_from_config(config, custom_objects=custom_objects)
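
A minimal round-trip sketch, assuming legacy Keras 1.x where model_from_config is exported from keras.models; the wrapper dict built by hand below mirrors what to_json()/to_yaml() serialize and is an assumption, not part of the example above.

from keras.layers import Input, Dense
from keras.models import Model, model_from_config

inputs = Input(shape=(4,))
outputs = Dense(2)(inputs)
model = Model(input=inputs, output=outputs)   # Keras 1.x argument names

# model_from_config expects a {'class_name': ..., 'config': ...} dict, not the
# bare list that Sequential.get_config() returns, hence the isinstance check above.
config = {'class_name': model.__class__.__name__, 'config': model.get_config()}
rebuilt = model_from_config(config)           # architecture only; weights are not copied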
Example #2
def model_from_json(json_string, custom_objects={}):
    '''Parses a JSON model configuration string
    and returns a model instance.
    '''
    import json
    from keras.utils.layer_utils import layer_from_config
    config = json.loads(json_string)
    return layer_from_config(config, custom_objects=custom_objects)
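
Usage sketch under the same legacy-Keras assumption: to_json() serializes only the architecture, and model_from_json() rebuilds an unweighted model from that string.

from keras.layers import Dense
from keras.models import Sequential, model_from_json

model = Sequential([Dense(2, input_dim=4)])
json_string = model.to_json()             # architecture as a JSON string
rebuilt = model_from_json(json_string)    # same architecture, freshly initialized weights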
Example #3
def model_from_yaml(yaml_string, custom_objects={}):
    '''Parses a YAML model configuration string
    and returns a model instance.
    '''
    import yaml
    from keras.utils.layer_utils import layer_from_config
    # Newer PyYAML releases require an explicit Loader argument for yaml.load.
    config = yaml.load(yaml_string)
    return layer_from_config(config, custom_objects=custom_objects)
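
The YAML variant mirrors the JSON helper. A sketch assuming legacy Keras with PyYAML installed:

from keras.layers import Dense
from keras.models import Sequential, model_from_yaml

model = Sequential([Dense(2, input_dim=4)])
yaml_string = model.to_yaml()             # architecture as a YAML string
rebuilt = model_from_yaml(yaml_string)    # weights are not restored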
Example #4
    def from_config(cls, config, layer_cache=None):
        '''Supports legacy Sequential config formats.
        '''
        from keras.utils.layer_utils import layer_from_config
        from keras.layers import Merge
        assert type(config) is list

        if not layer_cache:
            layer_cache = {}

        # Very old configs store the class name under 'name' and the layer name
        # under 'custom_name'; rewrite them into the {'class_name', 'config'} layout.
        def normalize_legacy_config(conf):
            if 'class_name' not in conf:
                class_name = conf['name']
                name = conf.get('custom_name')
                conf['name'] = name
                new_config = {
                    'class_name': class_name,
                    'config': conf,
                }
                return new_config
            return conf

        # the model we will return
        model = cls()

        # Shared layers can appear more than once in the config; reuse the cached
        # instance for a given name instead of building a second copy.
        def get_or_create_layer(layer_data):
            if layer_data['class_name'] == 'Sequential':
                return Sequential.from_config(layer_data['config'],
                                              layer_cache=layer_cache)
            name = layer_data['config'].get('name')
            if name in layer_cache:
                return layer_cache[name]
            layer = layer_from_config(layer_data)
            layer_cache[name] = layer
            return layer

        first_layer = config[0]
        first_layer = normalize_legacy_config(first_layer)
        if first_layer['class_name'] == 'Merge':
            merge_inputs = []
            first_layer_config = first_layer['config']
            for merge_input_config in first_layer_config.pop('layers'):
                merge_input = layer_from_config(merge_input_config)
                merge_inputs.append(merge_input)
            first_layer_config['layers'] = merge_inputs
            merge = Merge.from_config(first_layer_config)
            model.add(merge)
        else:
            layer = get_or_create_layer(first_layer)
            model.add(layer)

        for conf in config[1:]:
            conf = normalize_legacy_config(conf)
            layer = get_or_create_layer(conf)
            model.add(layer)
        return model
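
Round-trip sketch, assuming legacy Keras 1.x where Sequential.get_config() returns a list of per-layer dicts, which is exactly the shape the assert above checks for:

from keras.layers import Dense
from keras.models import Sequential

model = Sequential([Dense(8, input_dim=4), Dense(2)])
config = model.get_config()               # a list of layer configs, one per layer
rebuilt = Sequential.from_config(config)  # rebuilds the stack layer by layer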
Example #5
def get_or_create_layer(layer_data):
    # Excerpt of the cache-aware helper nested in Sequential.from_config
    # (Example #4); layer_from_config and layer_cache come from that scope.
    if layer_data['class_name'] == 'Sequential':
        return Sequential.from_config(layer_data['config'],
                                      layer_cache=layer_cache)
    name = layer_data['config'].get('name')
    if name in layer_cache:
        return layer_cache[name]
    layer = layer_from_config(layer_data)
    layer_cache[name] = layer
    return layer
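
Sketch of the caching contract, using a hypothetical minimal legacy Dense config and assuming layer_from_config is imported and layer_cache is defined at the same scope as the helper (as in Example #4): repeated calls with the same named config return the same layer instance.

layer_cache = {}
shared = {'class_name': 'Dense',
          'config': {'name': 'shared_dense', 'output_dim': 8}}  # hypothetical config

first = get_or_create_layer(shared)
second = get_or_create_layer(shared)
assert first is second   # the second call hits the cache instead of rebuilding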
Example #6
    def from_config(cls, config):
        # TODO: test legacy support
        from keras.utils.layer_utils import layer_from_config

        def normalize_legacy_config(conf):
            if 'class_name' not in conf:
                class_name = conf['name']
                name = conf.get('custom_name')
                conf['name'] = name
                new_config = {
                    'class_name': class_name,
                    'config': conf,
                }
                return new_config
            return conf

        graph = cls()
        inputs = config.get('input_config')
        for input in inputs:
            graph.add_input(**input)

        nodes = config.get('node_config')
        for node in nodes:
            layer_config = config['nodes'][node['name']]
            layer_config = normalize_legacy_config(layer_config)
            if 'layer' in node:
                # for add_shared_node
                node['layer'] = layer_from_config(node['layer'])
            else:
                layer = layer_from_config(layer_config)
                node['layer'] = layer

            node['create_output'] = False  # outputs will be added below
            if layer_config.get('shared'):
                graph.add_shared_node(**node)
            else:
                graph.add_node(**node)

        outputs = config.get('output_config')
        for output in outputs:
            graph.add_output(**output)
        return graph
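
Hypothetical round-trip for the legacy Graph container (the add_input/add_node/add_output API from early Keras; the import path varied across releases, e.g. keras.legacy.models in later 1.x versions):

from keras.layers import Dense
from keras.models import Graph   # location is release-dependent; see note above

graph = Graph()
graph.add_input(name='data', input_shape=(4,))
graph.add_node(Dense(2), name='dense', input='data')
graph.add_output(name='out', input='dense')

# get_config() emits the input_config / node_config / output_config sections
# that from_config above walks through.
rebuilt = Graph.from_config(graph.get_config())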
Example #7
    def from_config(cls, config):
        from keras.utils.layer_utils import layer_from_config
        # Rebuild the wrapped layer from its nested config, then the wrapper itself.
        layer = layer_from_config(config.pop('layer'))
        return cls(layer, **config)
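
This pattern matches the wrapper layers' from_config in legacy Keras (e.g. TimeDistributed): the wrapped layer's config sits under the 'layer' key and is rebuilt before the wrapper itself. A sketch under that assumption:

from keras.layers import Dense, TimeDistributed

wrapper = TimeDistributed(Dense(8), input_shape=(10, 4))
config = wrapper.get_config()                  # contains a nested 'layer' entry
rebuilt = TimeDistributed.from_config(config)  # rebuilds Dense first, then the wrapper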