def renderAndSaveModelImageFromJson(paramFlowJson, fout, jobDir=None):
    """Render the Keras model described by a DLS designer flow to an image.

    :param paramFlowJson: DLS designer flow description (JSON)
    :param fout: output image path for the rendered model graph
    :param jobDir: optional job directory; silently ignored when it does not
                   point at an existing directory
    :return: the output image path (``fout``)
    """
    flowParser = DLSDesignerFlowsParser(paramFlowJson)
    # A non-existent job directory is treated the same as no job directory.
    if jobDir is not None and not os.path.isdir(jobDir):
        jobDir = None
    trainer, _ = flowParser.buildKerasTrainer(pathJobDir=jobDir)
    kplot(trainer.model, to_file=fout, show_shapes=True)
    return fout
# In[ ]: #genTrainer = GeneratorTrainer(tracker, generator, processor, validator) #Avoid maximum recursion limit exception when pickling by increasing limit from ~1000 by default sys.setrecursionlimit(10000) #genTrainer.train(7, 5, batchSize, lnr, lnrdy) tracker.train(trainGenerator(), offEpochs, offBatches, batchSize, validator) tracker.save(trackerModelPath) # In[13]: #tracker.load(trackerModelPath) from keras.utils.visualize_util import plot as kplot kplot(regressor.model.layer, to_file='/home/fhdiaze/model.png') # # TRACKER TESTING # ## VARIABLES # In[14]: fps = 15 outDir = "/home/fhdiaze/Data/Videos/" # ## VOT TESTING # In[15]: tracker.setStateful(True, 1)
print(t1)
print(t2)
print(t3)
print(t4)
Test.printTest()


####################################
if __name__ == '__main__':
    # Round-trip a multi-input/multi-output designer flow through the V2
    # parser: DLS flow -> Keras JSON config -> file -> loaded Keras model.
    foutJson = 'keras-model-generated.json'
    fnFlowJson = '../../../data-test/test-models-json/testnet_multi_input_multi_output_v1.json'
    flowParser = DLSDesignerFlowsParserV2(fnFlowJson)
    flowParser.cleanAndValidate()
    # (1) Build connected and validated Model Node-flow (DLS-model-representation)
    flowParser.buildConnectedFlow()
    # (2) Generate dict-based Json Keras model (from DLS model representation)
    modelJson = flowParser.generateModelKerasConfigJson()
    # (3) Export generated json model to file
    with open(foutJson, 'w') as f:
        f.write(json.dumps(modelJson, indent=4))
    # (4) Try to load generated Keras model from json-file
    with open(foutJson, 'r') as f:
        model = keras.models.model_from_json(f.read())
    # (5) Visualize & summary of the model: check connections!
    fimgModel = '%s-figure.jpg' % foutJson
    kplot(model, fimgModel, show_shapes=True)
    plt.imshow(skio.imread(fimgModel))
    plt.grid(True)
    plt.show()
    model.summary()
def plot(self):
    """Render this model's architecture as a PNG under the igor save location."""
    out_path = join(self.igor.model_location, self.igor.saving_prefix,
                    'model_visualization.png')
    kplot(self.model, to_file=out_path)
    self.logger.debug("+ Model visualized at {}".format(out_path))
# return os.path.basename(fout)
@staticmethod
def renderModelImageToTmpFile(paramFlowJson, odir=None, pref='netgraph_', jobDir=None):
    """Render the model graph from a DLS designer flow into a fresh temp PNG.

    :param paramFlowJson: DLS designer flow description (JSON)
    :param odir: optional output directory, created when missing
    :param pref: filename prefix for the generated temp image
    :param jobDir: optional job directory forwarded to the renderer
    :return: path of the rendered PNG image
    """
    if odir is not None and not os.path.isdir(odir):
        os.mkdir(odir)
    # tempfile.mkstemp (unlike the deprecated, race-prone mktemp) creates the
    # file atomically; close the fd and let the renderer overwrite the path.
    fd, fout = tempfile.mkstemp(prefix=pref, suffix='.png', dir=odir)
    os.close(fd)
    tret = DLSDesignerFlowsParser.renderAndSaveModelImageFromJson(
        paramFlowJson, fout, jobDir)
    return tret


####################################
if __name__ == '__main__':
    # Smoke test: parse a sample flow, build the trainer, dump state and JSON,
    # and render the resulting Keras model graph.
    fnFlowJson = '../../../data-test/test-models-json/test_cnn1.json'
    fnFlowJsonOut = '../../../data-test/test-models-json/test_cnn1_out.json'
    dirJobs = '../../../data-test/test-models-json/'
    flowParser = DLSDesignerFlowsParser(fnFlowJson)
    flowParser.cleanAndValidate()
    # flowParser.exportConfigFlow(fnFlowJsonOut)
    modelTrainer, modelConfig = flowParser.buildKerasTrainer(
        pathJobDir=dirJobs)
    modelTrainer.saveModelState('/tmp/', isSaveWeights=False)
    modelJson = flowParser.buildKerasModelInJson()
    kplot(modelTrainer.model, to_file='/tmp/keras_draw.png', show_shapes=True)
    print('---------------')
    print(json.dumps(modelJson, indent=4))
# pathTestModel='../../../data-test/test-models-json/test_basic_cnn_network_v1.json'
pathTestModel='../../../data-test/test-models-json/test_basic_cnn_network_v1_with_train_params_v1.json'

if __name__ == '__main__':
    # Smoke test: pick the first available dataset, build a Keras trainer from
    # the test model JSON, adjust its I/O to the dataset and visualize it.
    dirData = dlsutils.getPathForDatasetDir()
    dirModels = dlsutils.getPathForModelsDir()
    dbWatcher = DatasetsWatcher(dirData)
    dbWatcher.refreshDatasetsInfo()
    assert dbWatcher.dictDbInfo, 'no datasets found in [%s]' % dirData
    # next(iter(...)) works on both Python 2 and 3; dict.keys()[0] is Py2-only
    # (dict views are not subscriptable on Python 3).
    dbInfoTest = dbWatcher.dictDbInfo[next(iter(dbWatcher.dictDbInfo))]
    print ('Dataset for tests : [ %s ]' % dbInfoTest)
    #
    with open(pathTestModel, 'r') as f:
        jsonModelData = json.load(f)
    modelParser = DLSDesignerFlowsParser(jsonModelData)
    modelTrainer, modelConfig = modelParser.buildKerasTrainer()
    batcherDB = BatcherImage2DLMDB(dbInfoTest.pathDB)
    #
    modelTrainerAdjusted = modelTrainer.adjustModelInputOutput2DBData(
        modelTrainer.model, batcherDB)
    for ii, ll in enumerate(modelTrainerAdjusted.layers):
        print ('[%d/%d] : %s, shape: inp=%s, out=%s'
               % (ii, len(modelTrainerAdjusted.layers), ll,
                  ll.input_shape, ll.output_shape))
    print ('*** Total Model params: %d' % modelTrainerAdjusted.count_params())
    #
    fimg = '/tmp/keras_draw.png'
    kplot(modelTrainerAdjusted, to_file=fimg, show_shapes=True)
    img = io.imread(fimg)
    plt.imshow(img)
    plt.show()
#!/usr/bin/python
# -*- coding: utf-8 -*-
__author__ = 'ar'

import skimage.io as io
import matplotlib.pyplot as plt
from keras.utils.visualize_util import plot as kplot

from app.backend.core.models.flow_parser import DLSDesignerFlowsParser

if __name__ == '__main__':
    # Parse a sample designer flow, build the Keras trainer and display the
    # rendered model graph.
    pathModelJson = '../../../data-test/test-models-json/test_cnn1.json'
    flowParser = DLSDesignerFlowsParser(pathModelJson)
    flowParser.cleanAndValidate()
    print('Model-flow isOk: [%s]' % flowParser.isOk())
    kerasTrainer, cfgSolver, layersDict = flowParser.buildKerasTrainer(
        isPrecalculateLayersDict=True)
    print(kerasTrainer.model)
    pathImg = '/tmp/keras_draw.png'
    kplot(kerasTrainer.model, to_file=pathImg, show_shapes=True)
    plt.imshow(io.imread(pathImg))
    plt.show()
# Fragment of a loop over `lstModelsPaths` (`pp`, `ii`, `numProto` come from
# the enclosing loop, which is outside this view): convert one Caffe prototxt
# to a Keras JSON model, reload it, and plot it into a subplot grid.
pathProto = pp['proto']
pathWeights = pp['weights']
# (1) convert Caffe->Keras
pathKerasModelOutput = '%s-kerasmodel.json' % os.path.splitext(
    pathProto)[0]
print('[%d/%d] %s --> %s' % (ii, len(lstModelsPaths), pathProto,
                             pathKerasModelOutput))
try:
    # NOTE(review): `pathWeights` is read but caffe_to_keras is called with
    # caffemodelPath=None, so weights are never converted — confirm intended.
    model = caffeConvert.caffe_to_keras(pathProto, caffemodelPath=None,
                                        debug=False)
    with open(pathKerasModelOutput, 'w') as f:
        f.write(model.to_json(indent=4))
    # (2) try to load and plot Keras model
    with open(pathKerasModelOutput, 'r') as f:
        jsonData = f.read()
    kerasModel = keras.models.model_from_json(
        jsonData, custom_objects=dictExtraLayers)
    pathKerasModelImage = '%s-kerasmodel.jpg' % os.path.splitext(
        pathProto)[0]
    kplot(kerasModel, to_file=pathKerasModelImage, show_shapes=True)
    img = io.imread(pathKerasModelImage)
    plt.subplot(1, numProto, ii + 1)
    plt.imshow(img)
    plt.title(os.path.splitext(os.path.basename(pathProto))[0])
except Exception as err:
    # Best-effort: a model that fails to convert/visualize is reported and
    # skipped so the remaining models still get processed.
    print(
        '\t**ERROR** Cant convert and visualize model [%s] : %s, skip...'
        % (pathProto, err))
plt.show()