def main(modelName, phase):
    """Evaluate stored prediction results for a model in a given phase.

    Recompiles the model's JSON settings, loads them, resolves the output
    directory for ``phase`` ('train' or 'test'), then scores the predictions
    found there.

    Args:
        modelName: name of the model; selects ``Model_Settings/<name>.json``.
        phase: 'train' or 'test'; anything else prints a message and aborts.

    Returns:
        The accuracy produced by ``evaluate``, or ``None`` on early exit.
    """
    # import json_maker, update json files and read requested json file
    import Model_Settings.json_maker as json_maker
    if not json_maker.recompile_json_files(modelName):
        return
    settings_name = modelName + '.json'
    print("Reading %s" % settings_name)
    with open('Model_Settings/' + settings_name) as settings_file:
        modelParams = json.load(settings_file)
    # Map the phase to the settings key holding its prediction directory.
    phase_dirs = {'train': 'trainOutputDir', 'test': 'testOutputDir'}
    if phase not in phase_dirs:
        print('Please enter proper phase')
        return
    results_dir = modelParams[phase_dirs[phase]]
    results = _get_resultDict(results_dir)
    acc, confmatrix = evaluate(results)
    print()
    print(confmatrix)
    print('----------------- Phase : ', phase)
    print('----------------- Prediction result path : ', results_dir)
    return acc
def main(argv=None):  # pylint: disable=unused-argument
    """Test-phase entry point: load model settings and launch evaluation.

    Expects ``argv`` shaped like ``sys.argv``: ``argv[1]`` is the model name,
    ``argv[2]`` the epoch number to load (0 for a fresh run).

    Args:
        argv: command-line argument list; ``None`` or too-short lists print
            usage and return.
    """
    # BUG FIX: argv defaults to None, so len(argv) raised TypeError on a
    # bare main() call — guard for None before measuring its length.
    if argv is None or len(argv) < 3:
        print("Enter 'model name' and 'epoch number to load / 0 for new'")
        return
    modelName = argv[1]
    epochNumber = int(argv[2])
    # import json_maker, update json files and read requested json file
    import Model_Settings.json_maker as json_maker
    if not json_maker.recompile_json_files(modelName):
        return
    jsonToRead = modelName + '.json'
    print("Reading %s" % jsonToRead)
    with open('Model_Settings/' + jsonToRead) as data_file:
        modelParams = json.load(data_file)
    # Stamp the module-level phase ('test') into the params before deriving
    # the control parameters from it.
    modelParams['phase'] = PHASE
    modelParams = _set_control_params(modelParams)
    print(modelParams['modelName'])
    print('Testing steps = %.1f' % float(modelParams['testMaxSteps']))
    print('Rounds on datase = %.1f' % float(
        (modelParams['testBatchSize'] * modelParams['testMaxSteps']) /
        modelParams['numTestDatasetExamples']))
    print('lossFunction = ', modelParams['lossFunction'])
    print('Test Input: %s' % modelParams['testDataDir'])
    print('Test Logs Output: %s' % modelParams['testLogDir'])
    print('')
    print('')
    train(modelParams, epochNumber)
def main(argv=None):  # pylint: disable=unused-argument
    """Training entry point for a specific iteration of a model.

    Expects ``argv`` shaped like ``sys.argv``: ``argv[1]`` model name,
    ``argv[2]`` iteration number (1-4), ``argv[3]`` epoch number to load
    (0 starts fresh and wipes/recreates the train log directory).

    Args:
        argv: command-line argument list; ``None`` or too-short lists print
            usage and return.
    """
    # BUG FIX: argv defaults to None, so len(argv) raised TypeError on a
    # bare main() call — guard for None before measuring its length.
    if argv is None or len(argv) < 4:
        print("Enter 'model name' and 'iteration number' and 'epoch number to load / 0 for new'")
        return
    modelName = argv[1]
    itrNum = int(argv[2])
    epochNumber = int(argv[3])
    # BUG FIX: the message promises "1 to 4 inclusive" but the original
    # check (itrNum < 0) let 0 slip through — reject anything below 1.
    if itrNum > 4 or itrNum < 1:
        print('iteration number should only be from 1 to 4 inclusive')
        return
    # import json_maker, update json files and read requested json file
    import Model_Settings.json_maker as json_maker
    if not json_maker.recompile_json_files(modelName, itrNum):
        return
    jsonToRead = modelName + '_' + str(itrNum) + '.json'
    print("Reading %s" % jsonToRead)
    with open('Model_Settings/' + jsonToRead) as data_file:
        modelParams = json.load(data_file)
    modelParams = _set_control_params(modelParams)
    print(modelParams['modelName'])
    print('Training steps = %.1f' % float(modelParams['trainMaxSteps']))
    print('Rounds on datase = %.1f' % float(
        (modelParams['trainBatchSize'] * modelParams['trainMaxSteps']) /
        modelParams['numTrainDatasetExamples']))
    print('lossFunction = ', modelParams['lossFunction'])
    print('Train Input: %s' % modelParams['trainDataDir'])
    #print('Test Input: %s' % modelParams['testDataDir'])
    print('Train Logs Output: %s' % modelParams['trainLogDir'])
    #print('Test Logs Output: %s' % modelParams['testLogDir'])
    print('Train Warp Output: %s' % modelParams['warpedTrainDataDir'])
    #print('Test Warp Output: %s' % modelParams['warpedTestDataDir'])
    print('')
    print('')
    if modelParams.get('lastTuple'):
        print('!!! Training model is built to use only the the last 2 tuples from the existing ', modelParams['numTuple'], ' tuples !!!')
    else:
        print('!!! Training model is built to use all of the ', modelParams['numTuple'], ' tuples !!!')
    print('')
    if epochNumber == 0:
        # Fresh run: the input data must exist, and any stale log directory
        # is removed before being recreated.
        if not tf.gfile.Exists(modelParams['trainDataDir']):
            print("Train input data folder doesn't exist...")
            print(modelParams['trainDataDir'])
            return
        #if input("(Overwrite WARNING) Did you change logs directory? (y) ") != "y":
        #    print("Please consider changing logs directory in order to avoid overwrite!")
        #    return
        if tf.gfile.Exists(modelParams['trainLogDir']):
            tf.gfile.DeleteRecursively(modelParams['trainLogDir'])
        tf.gfile.MakeDirs(modelParams['trainLogDir'])
    train(modelParams, epochNumber)
def main(argv=None):  # pylint: disable=unused-argument
    """Training entry point (n=2-tuple dataset variant) for a model iteration.

    Expects ``argv`` shaped like ``sys.argv``: ``argv[1]`` model name,
    ``argv[2]`` iteration number (1-4). Log-directory cleanup is disabled
    (commented out) in this variant.

    Args:
        argv: command-line argument list; ``None`` or too-short lists print
            usage and return.
    """
    # BUG FIX: argv defaults to None, so len(argv) raised TypeError on a
    # bare main() call — guard for None before measuring its length.
    if argv is None or len(argv) < 3:
        print("Enter 'model name' and 'iteration number'")
        return
    modelName = argv[1]
    itrNum = int(argv[2])
    # BUG FIX: the message promises "1 to 4 inclusive" but the original
    # check (itrNum < 0) let 0 slip through — reject anything below 1.
    if itrNum > 4 or itrNum < 1:
        print('iteration number should only be from 1 to 4 inclusive')
        return
    # import json_maker, update json files and read requested json file
    import Model_Settings.json_maker as json_maker
    if not json_maker.recompile_json_files(modelName, itrNum):
        return
    jsonToRead = modelName + '_' + str(itrNum) + '.json'
    print("Reading %s" % jsonToRead)
    with open('Model_Settings/' + jsonToRead) as data_file:
        modelParams = json.load(data_file)
    modelParams = _set_control_params(modelParams)
    print(modelParams['modelName'])
    print('Rounds on datase = %.1f' % float(
        (modelParams['trainBatchSize'] * modelParams['trainMaxSteps']) /
        modelParams['numTrainDatasetExamples']))
    print('lossFunction = ', modelParams['lossFunction'])
    print('Train Input: %s' % modelParams['trainDataDir'])
    #print('Test Input: %s' % modelParams['testDataDir'])
    print('Train Logs Output: %s' % modelParams['trainLogDir'])
    #print('Test Logs Output: %s' % modelParams['testLogDir'])
    print('Train Warp Output: %s' % modelParams['warpedTrainDataDir'])
    #print('Test Warp Output: %s' % modelParams['warpedTestDataDir'])
    print('')
    print('')
    print('Train Main is built and Dataset is complied with n = 2 tuples!!!')
    print('')
    #if input("(Overwrite WARNING) Did you change logs directory? (y) ") != "y":
    #    print("Please consider changing logs directory in order to avoid overwrite!")
    #    return
    #if tf.gfile.Exists(modelParams['trainLogDir']):
    #    tf.gfile.DeleteRecursively(modelParams['trainLogDir'])
    #tf.gfile.MakeDirs(modelParams['trainLogDir'])
    train(modelParams)
def main(argv=None):  # pylint: disable=unused-argument
    """Training entry point: load model settings, stamp PHASE, and train.

    Expects ``argv`` shaped like ``sys.argv``: ``argv[1]`` model name,
    ``argv[2]`` epoch number to load (0 starts fresh and wipes/recreates
    the train log directory).

    Args:
        argv: command-line argument list; ``None`` or too-short lists print
            usage and return.
    """
    # BUG FIX: argv defaults to None, so len(argv) raised TypeError on a
    # bare main() call — guard for None before measuring its length.
    if argv is None or len(argv) < 3:
        print("Enter 'model name' and 'epoch number to load / 0 for new'")
        return
    modelName = argv[1]
    epochNumber = int(argv[2])
    # import json_maker, update json files and read requested json file
    import Model_Settings.json_maker as json_maker
    if not json_maker.recompile_json_files(modelName):
        return
    jsonToRead = modelName + '.json'
    print("Reading %s" % jsonToRead)
    with open('Model_Settings/' + jsonToRead) as data_file:
        modelParams = json.load(data_file)
    # NOTE: the original declared `global PHASE` here, but the function only
    # reads the module-level PHASE and never assigns it, so the declaration
    # was a no-op and has been dropped.
    modelParams['phase'] = PHASE
    modelParams = _set_control_params(modelParams)
    print(modelParams['modelName'])
    print('Training steps = %.1f' % float(modelParams['trainMaxSteps']))
    print('Rounds on datase = %.1f' % float(
        (modelParams['trainBatchSize'] * modelParams['trainMaxSteps']) /
        modelParams['numTrainDatasetExamples']))
    print('lossFunction = ', modelParams['lossFunction'])
    print('Train Input: %s' % modelParams['trainDataDir'])
    #print('Test Input: %s' % modelParams['testDataDir'])
    print('Train Logs Output: %s' % modelParams['trainLogDir'])
    #print('Test Logs Output: %s' % modelParams['testLogDir'])
    print('')
    print('')
    if epochNumber == 0:
        #if input("(Overwrite WARNING) Did you change logs directory? (y) ") != "y":
        #    print("Please consider changing logs directory in order to avoid overwrite!")
        #    return
        # Fresh run: remove any stale log directory, then recreate it.
        if tf.gfile.Exists(modelParams['trainLogDir']):
            tf.gfile.DeleteRecursively(modelParams['trainLogDir'])
        tf.gfile.MakeDirs(modelParams['trainLogDir'])
    train(modelParams, epochNumber)
import os.path import time import logging import json import csv import importlib import numpy as np from six.moves import xrange # pylint: disable=redefined-builtin import tensorflow as tf #import tensorflow.python.debug as tf_debug PHASE = 'test' # import json_maker, update json files and read requested json file import Model_Settings.json_maker as json_maker json_maker.recompile_json_files() jsonToRead = 'GPUX_170301_ITR_B_4.json' print("Reading %s" % jsonToRead) with open('Model_Settings/'+jsonToRead) as data_file: modelParams = json.load(data_file) # import input & output modules import Data_IO.data_input as data_input import Data_IO.data_output as data_output # import corresponding model name as model_cnn, specifed at json file model_cnn = importlib.import_module('Model_Factory.'+modelParams['modelName']) #################################################### FLAGS = tf.app.flags.FLAGS tf.app.flags.DEFINE_integer('printOutStep', 10,