def convert_sequential_model(model, num_dims=None,
                             mxts_mode=MxtsMode.DeepLIFT):
    converted_layers = []
    if (model.layers[0].input_shape is not None):
        input_shape = model.layers[0].input_shape[1:]
        num_dims_input = len(input_shape) + 1 #+1 for the batch axis
        assert num_dims is None or num_dims_input==num_dims,\
            "num_dims argument of "+str(num_dims)+" is incompatible with"\
            +" the number of dims in layers[0].input_shape which is: "\
            +str(model.layers[0].input_shape)
        num_dims = num_dims_input
    else:
        input_shape = None
    #the input layer uses a fixed reference (default) of 0.0
    converted_layers.append(
        blobs.Input_FixedDefault(default=0.0,
                                 num_dims=num_dims,
                                 shape=input_shape,
                                 name="input"))
    #look up the registered conversion function for each Keras layer
    #type and convert the layers one by one
    for layer_idx, layer in enumerate(model.layers):
        conversion_function = layer_name_to_conversion_function[
            layer.get_config()[KerasKeys.name]]
        converted_layers.extend(
            conversion_function(layer=layer,
                                name=str(layer_idx),
                                mxts_mode=mxts_mode))
    connect_list_of_layers(converted_layers)
    converted_layers[-1].build_fwd_pass_vars()
    return models.SequentialModel(converted_layers)
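#Usage sketch (illustrative, not part of the source). Assumes the old
#Keras Sequential API that this version of the converter targets; the
#tiny model below and its layer sizes are hypothetical.
from keras.models import Sequential
from keras.layers.core import Dense, Activation

keras_model = Sequential()
keras_model.add(Dense(10, input_dim=4))
keras_model.add(Activation("relu"))
keras_model.add(Dense(1))
keras_model.compile(loss="mse", optimizer="sgd")

deeplift_model = convert_sequential_model(
    keras_model, mxts_mode=MxtsMode.DeepLIFT)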
def convert_sequential_model(
        model_config,
        nonlinear_mxts_mode=NonlinearMxtsMode.DeepLIFT_GenomicsDefault,
        verbose=True,
        dense_mxts_mode=DenseMxtsMode.Linear,
        conv_mxts_mode=ConvMxtsMode.Linear,
        maxpool_deeplift_mode=default_maxpool_deeplift_mode,
        layer_overrides={},
        custom_conversion_funcs={}):
    if (verbose):
        print("nonlinear_mxts_mode is set to: "
              +str(nonlinear_mxts_mode))
        sys.stdout.flush()
    converted_layers = []
    batch_input_shape = (model_config[0]['config']
                         [KerasKeys.batch_input_shape])
    converted_layers.append(
        layers.core.Input(batch_shape=batch_input_shape, name="input"))
    #sequential_container_conversion extends converted_layers in place,
    #so the reassignment is not strictly necessary, but it makes the
    #data flow explicit
    converted_layers = sequential_container_conversion(
        config=model_config, name="", verbose=verbose,
        nonlinear_mxts_mode=nonlinear_mxts_mode,
        dense_mxts_mode=dense_mxts_mode,
        conv_mxts_mode=conv_mxts_mode,
        maxpool_deeplift_mode=maxpool_deeplift_mode,
        converted_layers=converted_layers,
        layer_overrides=layer_overrides)
    converted_layers[-1].build_fwd_pass_vars()
    return models.SequentialModel(converted_layers)
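#Usage sketch (illustrative, not part of the source). In released
#DeepLIFT this config-based converter is normally reached through
#convert_model_from_saved_files, which loads a saved Keras 2 model,
#folds the weights into each layer's config dict, and dispatches to
#convert_sequential_model for Sequential architectures. "model.h5" is
#a hypothetical path.
from deeplift.conversion import kerasapi_conversion as kc
from deeplift.layers import NonlinearMxtsMode

deeplift_model = kc.convert_model_from_saved_files(
    "model.h5",
    nonlinear_mxts_mode=NonlinearMxtsMode.DeepLIFT_GenomicsDefault)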
def convert_sequential_model(
        model, num_dims=None,
        nonlinear_mxts_mode=NonlinearMxtsMode.DeepLIFT,
        verbose=True,
        dense_mxts_mode=DenseMxtsMode.Linear,
        maxpool_deeplift_mode=default_maxpool_deeplift_mode):
    converted_layers = []
    if (model.layers[0].input_shape is not None):
        input_shape = model.layers[0].input_shape
        assert input_shape[0] is None #batch axis
        num_dims_input = len(input_shape)
        assert num_dims is None or num_dims_input==num_dims,\
            "num_dims argument of "+str(num_dims)+" is incompatible with"\
            +" the number of dims in layers[0].input_shape which is: "\
            +str(model.layers[0].input_shape)
        num_dims = num_dims_input
    else:
        input_shape = None
    converted_layers.append(
        blobs.Input(num_dims=num_dims, shape=input_shape, name="input"))
    #sequential_container_conversion extends converted_layers in place,
    #so the reassignment is not strictly necessary, but it makes the
    #data flow explicit
    converted_layers = sequential_container_conversion(
        layer=model, name="", verbose=verbose,
        nonlinear_mxts_mode=nonlinear_mxts_mode,
        dense_mxts_mode=dense_mxts_mode,
        maxpool_deeplift_mode=maxpool_deeplift_mode,
        converted_layers=converted_layers)
    deeplift.util.connect_list_of_layers(converted_layers)
    converted_layers[-1].build_fwd_pass_vars()
    return models.SequentialModel(converted_layers)
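#Usage sketch (illustrative, not part of the source): converting and
#then compiling a scoring function, reusing the keras_model from the
#first sketch above. get_target_contribs_func and its arguments follow
#the DeepLIFT README; exact signatures varied across releases, so treat
#the scoring calls below as an assumption.
import numpy as np

deeplift_model = convert_sequential_model(
    keras_model, nonlinear_mxts_mode=NonlinearMxtsMode.DeepLIFT)
contribs_func = deeplift_model.get_target_contribs_func(
    find_scores_layer_idx=0, target_layer_idx=-1)
X = np.random.random((100, 4)).astype("float32") #hypothetical input data
scores = np.array(contribs_func(task_idx=0,
                                input_data_list=[X],
                                batch_size=10,
                                progress_update=1000))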