Code example #1
 def loadBatcherLMDB(self, dbJobID, sizeBatch):
     # Resolve the dataset directory for this job and attach an LMDB image batcher
     dirDataset = dlsutils.getPathForDatasetDir()
     pathLMDBJob = os.path.join(dirDataset, dbJobID)
     self.batcherLMDB = BatcherImage2DLMDB(pathLMDBJob, sizeBatch)
     self.sizeBatch = sizeBatch
     # Fail fast if the LMDB data could not be opened or is inconsistent
     if not self.batcherLMDB.isOk():
         strErr = "[KERAS-TRAINER] Incorrect LMDB-data in [%s]" % dbJobID
         self.printError(strErr)
         raise Exception(strErr)
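A minimal usage sketch for the loader above, assuming a trainer object that exposes loadBatcherLMDB(); the class name KerasTrainerImage2D, the job id and the batch size are placeholders for illustration, not values taken from the project.

# Hypothetical caller: 'trainer' is assumed to expose the method shown above.
trainer = KerasTrainerImage2D()                 # assumed class name, for illustration only
try:
    trainer.loadBatcherLMDB('dbset-000123', sizeBatch=64)   # placeholder job id / batch size
except Exception as err:
    # the loader raises when the LMDB directory is missing or malformed
    print('Cannot load LMDB batcher: %s' % err)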
Code example #2
 def buildModelTrainTaskDir(cfgModel):
     # cfgModel may be given either as an already-parsed dict or as a path to
     # a JSON file on disk; load it in the latter case
     if not isinstance(cfgModel, dict):
         with open(cfgModel, 'r') as f:
             cfgModel = json.load(f)
     #
     modelParser = DLSDesignerFlowsParser(cfgModel)
     modelTrainer, solverConfig = modelParser.buildKerasTrainer()
     #
     taskId = dlsutils.getUniqueTaskId(PREFIX_TASKS_DIR)
     dirWithModels = dlsutils.getPathForModelsDir()
     dirWithDatasets = dlsutils.getPathForDatasetDir()
     dirTaskOut = os.path.join(dirWithModels, taskId)
     #
     datasetId = solverConfig['dataset-id']
     dirDataset = os.path.join(dirWithDatasets, datasetId)
     dlsutils.makeDirIfNotExists(dirTaskOut)
     #
     # modelAdjusted = modelTrainer.adjustModelInputOutput2DBData(modelTrainer.model, dirDataset)
     modelAdjusted = modelTrainer.model
     foutConfigModel = os.path.join(dirTaskOut, CFG_MODEL_TRAIN)
     foutConfigNetwork = os.path.join(dirTaskOut, CFG_MODEL_NETWORK)
     foutConfigSolver = os.path.join(dirTaskOut, CFG_SOLVER)
     foutConfig = os.path.join(dirTaskOut, CFG_MODEL)
     with open(foutConfigNetwork, 'w') as f:
         f.write(json.dumps(cfgModel, indent=4))
     with open(foutConfigModel, 'w') as f:
         f.write(
             modelAdjusted.to_json(sort_keys=True,
                                   indent=4,
                                   separators=(',', ': ')))
     with open(foutConfigSolver, 'w') as f:
         f.write(json.dumps(solverConfig, indent=4))
     # prepare basic model config
     tdateTime = getDateTimeForConfig()
     # look up a human-readable DB name for the dataset, if it is known to the watcher
     if datasetId in dbapi.datasetWatcher.dictDbInfo:
         dbName = dbapi.datasetWatcher.dictDbInfo[datasetId].cfg.getDBName()
     else:
         dbName = 'Unknown DB-Name'
     modelConfig = {
         'id': taskId,
         'dataset-id': datasetId,
         'dataset-name': dbName,
         'date': tdateTime['date'],
         'time': tdateTime['time'],
         'type': 'image2d-classification',
         'name': cfgModel['name'],
         'network': cfgModel['name'],
         'description': cfgModel['description']
     }
     with open(foutConfig, 'w') as f:
         f.write(json.dumps(modelConfig, indent=4))
     return (taskId, dirTaskOut)
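A hedged sketch of driving buildModelTrainTaskDir() from a caller; it reuses the saved-network path from code example #4 below, and assumes only that the function and its surrounding constants are importable.

# Hypothetical caller: the function accepts either a parsed dict or a path
# to a JSON network config (see the isinstance() check above).
fnFlowJson = '../../../../data/network/saved/test_simple_cnn_model1.json'
taskId, dirTaskOut = buildModelTrainTaskDir(fnFlowJson)
print('new training task [%s] prepared in [%s]' % (taskId, dirTaskOut))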
Code example #3
File: task_db_image2d_cls.py  Project: vitteemou/DLS
 def __init__(self, configJson):
     # (1) Task-constructor:
     Task.__init__(self)
     # (2) prepare the db-directory and temporarily save the config there in JSON format
     tdirDbId = dlsutils.getUniqueTaskId(self.prefixDataset)
     pathDatasets = dlsutils.getPathForDatasetDir()
     pathDirOut = os.path.abspath(os.path.join(pathDatasets, tdirDbId))
     dlsutils.makeDirIfNotExists(pathDirOut)
     pathCfgInp = os.path.join(pathDirOut, 'cfg-inp.json')
     with open(pathCfgInp, 'w') as f:
         f.write(json.dumps(configJson, indent=4))
     # (3) DBImage2DBuilder-constructor
     DBImage2DBuilder.__init__(self, pathCfgInp, pathDirOut)
     # self.initializeInfo()
     self.type = 'db-image2d-cls'
     self.basetype = 'dataset'
     self.icon = "/frontend/assets/icon/img/img-dataset1.png"
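A sketch of constructing this dataset-build task; the class name TaskDBImage2DCls and the request file name are assumptions for illustration, since the snippet shows only the __init__ body.

# Hypothetical usage: configJson is the dict the frontend posts for a
# 2D-image classification dataset build; here it is simply read from disk.
with open('cfg-dataset-request.json', 'r') as f:    # placeholder file name
    configJson = json.load(f)
task = TaskDBImage2DCls(configJson)                 # assumed class name for this __init__
print(task.type, task.basetype, task.icon)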
Code example #4
File: flow_parser.py  Project: vitteemou/DLS
                for ll in nn.inpNode:
                    tinpShape.append(ll.shapeOut)
            else:
                tinpShape = nn.inpNode[0].shapeOut
        else:
            tinpShape = nn.shapeInp
        toutShape = nn._getLayer_LW().get_output_shape_for(tinpShape)
        nn.shapeInp = tinpShape
        nn.shapeOut = toutShape


####################################
if __name__ == '__main__':
    import app.backend.core.utils as dlsutils
    #
    dirData = dlsutils.getPathForDatasetDir()
    #
    foutJson = 'keras-model-generated-db.json'
    fnFlowJson = '../../../../data/network/saved/testnet_multi_input_multi_output_v1.json'
    # fnFlowJson = '../../../../data/network/saved/test_simple_cnn_model1.json'
    flowParser = DLSDesignerFlowsParser(fnFlowJson)
    flowParser_LW = DLSDesignerFlowsParser(fnFlowJson)
    flowParser.cleanAndValidate()
    flowParser_LW.cleanAndValidate()
    # (1) Build connected and validated Model Node-flow (DLS-model-representation)
    flowParser.buildConnectedFlow()
    flowParser_LW.buildConnectedFlow()
    print('----[ Network Flow]----')
    for ii, ll in enumerate(flowParser.configFlowLinked):
        print(ii, " : ", ll)
    sortedFlow = flowParser._topoSort(flowParser.configFlowLinked)
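The truncated excerpt at the top of this example apparently belongs to a shape-propagation loop over the sorted nodes; the continuation below is a hedged reconstruction under that assumption, with the outer loop and branching inferred from the fragment rather than copied from the source.

    # Assumed continuation of the demo: walk the topologically sorted flow and
    # propagate shapes, mirroring the truncated excerpt at the top of this example.
    for nn in sortedFlow:
        if len(nn.inpNode) > 1:
            tinpShape = [ll.shapeOut for ll in nn.inpNode]   # multi-input node
        elif len(nn.inpNode) == 1:
            tinpShape = nn.inpNode[0].shapeOut               # single predecessor
        else:
            tinpShape = nn.shapeInp                          # input node: use its own shape
        toutShape = nn._getLayer_LW().get_output_shape_for(tinpShape)
        nn.shapeInp = tinpShape
        nn.shapeOut = toutShape
        print(nn, ' : shapeInp=', tinpShape, ' shapeOut=', toutShape)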