def depthwise_conv3d_args_preprocessor(args, kwargs):
    """Translate the legacy ``init`` kwarg to ``depthwise_initializer``.

    Renames the kwarg in place, then defers to the shared Conv3D legacy
    preprocessor and merges both conversion logs.
    """
    renamed = []
    if 'init' in kwargs:
        kwargs['depthwise_initializer'] = kwargs.pop('init')
        renamed.append(('init', 'depthwise_initializer'))
    args, kwargs, more = conv3d_args_preprocessor(args, kwargs)
    return args, kwargs, renamed + more


# Legacy (Keras 1.x) call-signature shim for depthwise 3D convolutions:
# maps old positional/keyword names onto the Keras 2 equivalents.
legacy_depthwise_conv3d_support = generate_legacy_interface(
    allowed_positional_args=['filters', 'kernel_size'],
    conversions=[
        ('nb_filter', 'filters'),
        ('subsample', 'strides'),
        ('border_mode', 'padding'),
        ('dim_ordering', 'data_format'),
        ('b_regularizer', 'bias_regularizer'),
        ('b_constraint', 'bias_constraint'),
        ('bias', 'use_bias'),
    ],
    value_conversions={
        'dim_ordering': {
            'tf': 'channels_last',
            'th': 'channels_first',
            'default': None,
        },
    },
    preprocessor=depthwise_conv3d_args_preprocessor)
def depthwise_conv2d_args_preprocessor(args, kwargs): converted = [] if 'init' in kwargs: init = kwargs.pop('init') kwargs['depthwise_initializer'] = init converted.append(('init', 'depthwise_initializer')) args, kwargs, _converted = conv2d_args_preprocessor(args, kwargs) return args, kwargs, converted + _converted legacy_depthwise_conv2d_support = generate_legacy_interface( allowed_positional_args=['filters', 'kernel_size'], conversions=[('nb_filter', 'filters'), ('subsample', 'strides'), ('border_mode', 'padding'), ('dim_ordering', 'data_format'), ('b_regularizer', 'bias_regularizer'), ('b_constraint', 'bias_constraint'), ('bias', 'use_bias')], value_conversions={'dim_ordering': {'tf': 'channels_last', 'th': 'channels_first', 'default': None}}, preprocessor=depthwise_conv2d_args_preprocessor) class DepthwiseConv2D(Conv2D): @legacy_depthwise_conv2d_support def __init__(self, filters, kernel_size, strides=(1, 1), padding='valid',
import os

import numpy as np
from keras import backend as K
from keras.legacy.interfaces import (generate_legacy_interface,
                                     recurrent_args_preprocessor)
from keras.models import model_from_json

# Legacy (Keras 1.x) call-signature shim for PredNet: maps old
# positional/keyword names and their old string/enum values onto the
# Keras 2 equivalents.
legacy_prednet_support = generate_legacy_interface(
    allowed_positional_args=[
        'stack_sizes',
        'R_stack_sizes',
        'A_filt_sizes',
        'Ahat_filt_sizes',
        'R_filt_sizes',
    ],
    conversions=[
        ('dim_ordering', 'data_format'),
        ('consume_less', 'implementation'),
    ],
    value_conversions={
        'dim_ordering': {
            'tf': 'channels_last',
            'th': 'channels_first',
            'default': None,
        },
        'consume_less': {'cpu': 0, 'mem': 1, 'gpu': 2},
    },
    preprocessor=recurrent_args_preprocessor)


# Convert old Keras (1.2) json models and weights to Keras 2.0
def convert_model_to_keras2(old_json_file, old_weights_file,
                            new_json_file, new_weights_file):
    from prednet import PredNet
from functools import partial

import numpy as np
from keras import backend as K
from keras.engine import InputSpec
from keras.legacy.interfaces import generate_legacy_interface
from keras.utils.generic_utils import deserialize_keras_object, to_list
from scipy.sparse import csr_matrix, issparse

from .backend_keras import dumps_sparse, is_tensor, loads_sparse
from .backend_keras import make_sparse, sp_floor
from .utils import patch_assert_input_compatibility, patch_predict_model

# Legacy shim: accept the old `output_dim` kwarg as `units`.
legacy_entitie_support = generate_legacy_interface(
    allowed_positional_args=['units'],
    conversions=[('output_dim', 'units')])


# This only adds clarity to the network diagrams, since everything ends
# up encapsulated as a Keras class and shows as a single node in the
# graph; it is nothing more than one dense layer per entity (2 entities)
# plus one larger dense layer connected to the concat of the layers
# mentioned above.
@patch_assert_input_compatibility
class RelationLayer(Layer):

    @legacy_entitie_support
    def __init__(self, units, **kwargs):
        if 'input_shape' not in kwargs and 'input_dim' in kwargs:
            kwargs['input_shape'] = (kwargs.pop('input_dim'),)
        super(RelationLayer, self).__init__(**kwargs)
        self.input_spec = InputSpec(min_ndim=2)
        self.units = units