Example #1
"""
Python script to plot some outputs of the model
@author: jbrlod
"""
import os
import numpy as np
import matplotlib.pyplot as plt
import shutil
import xarray as xr
from baseutil import dataset

# change the training set name if necessary (it can also be a testing dataset)
trainingname = 'training-small.nc'

#data directory
datadir = '../data'
ds = dataset(basename=os.path.join(datadir, trainingname))

from keras.models import load_model
from modelutil import masked_mse

# name of the neural network
name = 'model_4layers'

# output file name
outname = 'dataset_nn.nc'

model = load_model(os.path.join(datadir, name),
                   custom_objects={'masked_mse': masked_mse})

ypredict = xr.DataArray(model.predict(ds.X), coords=ds.yt.coords)
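# A minimal sketch of writing the prediction to the output file named above;
# wrapping ypredict in an xr.Dataset is an assumption, only ypredict, outname,
# datadir and the imports come from the code above.
dsout = xr.Dataset({'ypredict': ypredict})
dsout.to_netcdf(os.path.join(datadir, outname))
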
Example #2
]  #,'temporal','bintemp']  # list of model directories based on the input data models

# TRAINING_SET_NAME & WEIGHTS BASENAME & DATA DIRECTORY
count = 0
#for i, _ in enumerate(appPattern):
# input files for training
trainingName = 'training_' + appPattern[0] + '.nc'
weightsName = 'weights_' + appPattern[0] + '.nc'
# Data Directory
trainingdir = '/net/argos/data/parvati/mkouassi/share/data/' + appPattern[0]

# Loading input dataset (training dataset)
trainingds = os.path.join(trainingdir, trainingName)

# CALL THE DATASET CLASS
ds = dataset(basename=trainingds)

# Build the binary input mask and the loss-function weight masks
weightdsName = os.path.join(trainingdir, weightsName)
wds = weights_mask(trainingds, weightdsName, weight_c=0.1, weight_n=1)
weight = wds.weights.expand_dims('canal', 3)
# Build the binary mask for the classic or binary input
Xbinary = wds.bmask.expand_dims('canal', 3)
# Choose the input type: binary or classic
if EntryData[0]:  # Classic
    Xbinary.values = np.ones_like(Xbinary.values)
elif EntryData[1]:  # Binary
    Xbinary.values = np.array(Xbinary.values, dtype='int')

## Concatenate the 'yt' and weights DataArrays
yt_2dim = xr.concat((ds.yt, weight), dim='canal')
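# A minimal sketch of how the binary mask channel could be appended to the
# network input, mirroring the construction of yt_2dim above; concatenating
# ds.X with Xbinary along 'canal' is an assumption about how the model
# consumes the extra channel.
X_2dim = xr.concat((ds.X, Xbinary), dim='canal')
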
Example #3
    ddir = alldir[2]
    datadir = '../data/square/' + ddir
    pattern = allPattern[2]
    testname = alltestname[2]
    weightsName = 'Weights_Square1.nc'

elif (inputds == AllInputDs[3]):
    ddir = alldir[3]
    datadir = '../data/square/' + ddir
    pattern = allPattern[3]
    testname = alltestname[3]
    weightsName = 'Weights_Squares.nc'


# CALL THE DATASET CLASS
ds = dataset(basename=os.path.join(datadir, testname))

# Build the binary input mask and the loss-function weight masks
Inputds = os.path.join(datadir, testname)
weightdsName = os.path.join(datadir, weightsName)
wds = weights_mask(Inputds, weightdsName, weight_c=0.1, weight_n=1)
weight = wds.weights.expand_dims('canal', 3)
Xbinary = wds.bmask.expand_dims('canal', 3)
## Concatenate the 'yt' and weights DataArrays
yt_2dim = xr.concat((ds.yt, weight), dim='canal')

## MODEL DIRECTORY
from keras.models import load_model
from modelutil import context_mse, masked_mse

allLoss = ['mmse', 'cmse']
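# A minimal sketch of how a trained model could be reloaded with its custom
# losses; load_model, masked_mse, context_mse, datadir, pattern and allLoss
# come from the code above, but the model file naming scheme below is a
# hypothetical assumption.
modelname = 'model_' + pattern + '_' + allLoss[0]  # hypothetical naming scheme
model = load_model(os.path.join(datadir, modelname),
                   custom_objects={'masked_mse': masked_mse,
                                   'context_mse': context_mse})
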
Example #4
]  #,'temporal','bintemp']  # list of model directories based on the input data models

# TRAINING_SET_NAME & WEIGHTS BASENAME & DATA DIRECTORY
count = 0
for i, _ in enumerate(appPattern):
    # input files for training
    trainingName = 'training_' + appPattern[i] + '.nc'
    weightsName = 'weights_' + appPattern[i] + '.nc'
    # Data Directory
    trainingdir = '../data/' + appPattern[i]

    # Loading input dataset (training dataset)
    trainingds = os.path.join(trainingdir, trainingName)

    # CALL THE DATASET CLASS
    ds = dataset(basename=trainingds)

    # Build the binary input mask and the loss-function weight masks
    weightdsName = os.path.join(trainingdir, weightsName)
    wds = weights_mask(trainingds, weightdsName, weight_c=0.1, weight_n=1)
    weight = wds.weights.expand_dims('canal', 3)
    # Build the binary mask for the classic or binary input
    Xbinary = wds.bmask.expand_dims('canal', 3)
    for _, bincl in enumerate(EntryData):
        if (bincl == EntryData[0]):
            Xbinary.values = np.ones_like(Xbinary.values)
        elif (bincl == EntryData[1]):
            Xbinary.values = np.array(Xbinary.values, dtype='int')

        ## Concatenate the 'yt' and weights DataArrays
        #YT_2dim = np.stack([ds.yt, weight], axis = 3); YT_2dim = YT_2dim.squeeze()
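        # A minimal sketch of the concatenation step, mirroring Example #2;
        # building yt_2dim with xr.concat is an assumption suggested by the
        # commented-out np.stack alternative above.
        yt_2dim = xr.concat((ds.yt, weight), dim='canal')
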
Example #5
# name of the input data
basename = 'medchl-small.nc'

# name of the output training dataset
trainingname = 'training-small.nc'

# option for masking
mfun = make_mask #masking function (in baseutil)
margs = \
   {'msize':8, # size of the mask
    'nmask':1} # number of masks per image



# Make the dataset
ds = dataset(srcname = os.path.join(datadir,basename) , overwrite = True)
ds.masking(mfun = mfun, **margs)
ds.savebase(os.path.join(datadir,trainingname))

# flag to plot some random images
PLOT = True

# flag to save the images
SAVE = True

#Plot some images
if PLOT:
    # example dir
    exampledir = os.path.join('../figures/examples/',os.path.splitext(trainingname)[0])
    shutil.rmtree(exampledir,ignore_errors=True) 
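    # A minimal sketch of the plotting loop that the snippet truncates here,
    # assuming numpy as np and matplotlib.pyplot as plt are imported as in
    # Example #1; treating ds.X / ds.yt as image arrays is an assumption,
    # only exampledir and SAVE come from the code above.
    os.makedirs(exampledir, exist_ok=True)
    for i in np.random.choice(ds.X.shape[0], size=3, replace=False):
        fig, (ax1, ax2) = plt.subplots(1, 2)
        ax1.imshow(np.squeeze(ds.X[i].values))   # masked input image
        ax2.imshow(np.squeeze(ds.yt[i].values))  # target image
        if SAVE:
            fig.savefig(os.path.join(exampledir, 'example_{}.png'.format(i)))
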
Example #6
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from baseutil import dataset
# change the path of the training set if necessary
ds = dataset(basename='../data/trainingset.nc')
# for some reason, it seems to work better if the dataset is instantiated
# before importing keras

from keras.models import Model
from keras.layers.convolutional import Conv2DTranspose, Conv2D
from keras.layers.core import Activation
from keras.layers import MaxPooling2D, concatenate, Input
from modelutil import masked_mse
#import xarray as xr

name = 'model_7layers'
#ds = xr.open_dataset('../data/training.nc')
#X = ds['X'].values  # .values is optional; it yields a np.array
#yt = ds['yt'].values

# dimensions of the input data
img_rows, img_cols = 64, 64
n_feat_in, filter_size, filter_size = 25, 3, 3


## test with 1 phase first, then implement more internal conv and deconv phases
def get_model():
    # define the inputs
    inputs = Input(shape=(64, 64, 1))
    # standard convolution
    conv_1 = Conv2D(20, (3, 3), strides=(1, 1), padding='same')(inputs)
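    # The snippet is truncated here; below is a minimal sketch of how the
    # remaining encoder-decoder layers and the compile step could look, using
    # only the layers imported above. The filter counts and depth of the real
    # 'model_7layers' are assumptions, and masked_mse is assumed to be a
    # Keras-compatible loss function (as suggested by Example #1).
    pool_1 = MaxPooling2D(pool_size=(2, 2))(conv_1)
    conv_2 = Conv2D(40, (3, 3), strides=(1, 1), padding='same')(pool_1)
    up_1 = Conv2DTranspose(20, (3, 3), strides=(2, 2), padding='same')(conv_2)
    merge_1 = concatenate([up_1, conv_1], axis=3)
    conv_3 = Conv2D(1, (3, 3), strides=(1, 1), padding='same')(merge_1)
    outputs = Activation('linear')(conv_3)

    model = Model(inputs=inputs, outputs=outputs)
    model.compile(optimizer='adam', loss=masked_mse)
    return model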