Example no. 1
0
# Example: build the keyword-argument dict for a PGAN training run, create the
# output/checkpoint directories, and merge the JSON training config with any
# command-line overrides.
# NOTE(review): my_maxIterAtScale, my_alphaNJumps and my_alphaSizeJumps are
# expected to be defined earlier in the file — confirm before running.
my_bs = 16
kwargs = {'model_name': 'PGAN', 'no_vis': False, 'np_vis': True, 'restart': False, 'name': 'test4', 'dir': '../output_networks', 'configPath': 'config_test.json', 'saveIter': 16000, 'evalIter': 100, 'Scale_iter': None, 'partition_value': None, 'maxIterAtScale': my_maxIterAtScale, 'alphaJumpMode': 'linear', 'iterAlphaJump': None, 'alphaJumpVals': None, 'alphaNJumps': my_alphaNJumps, 'alphaSizeJumps': my_alphaSizeJumps, 'depthScales': None, 'miniBatchSize': my_bs, 'dimLatentVector': None, 'initBiasToZero': None, 'perChannelNormalization': None, 'lossMode': None, 'lambdaGP': None, 'leakyness': None, 'epsilonD': None, 'miniBatchStdDev': None, 'baseLearningRate': None, 'dimOutput': None, 'weightConditionG': None, 'weightConditionD': None, 'GDPP': None, 'overrides': False}
# the nEpochs isn't clear what it is
# see ./models/trainer/standard_configurations/dcgan_config.py:50:_C.nEpoch = 10
trainingConfig = {'config': {}}
# arguments for GANTrainer = trainerModule 'visualisation':need to import vis_module = importlib.import_module("visualization.np_visualizer"),'lossIterEvaluation':100,'checkPointDir':../output_networks/test1,'saveIter':16000,'modelLabel':test1,'partitionValue':None

# Build the output directory if necessary.
# BUG FIX: the original tested `not kwargs.get('dir', '.')` — the truthiness
# of the directory *string* — so a real path was never created (and an empty
# string would have been passed to os.mkdir). Check for existence instead.
outputDir = kwargs.get('dir', '.')
if not os.path.isdir(outputDir):
    os.mkdir(outputDir)

# Checkpoint data: the run is identified by its name; checkpoints live in
# <dir>/<name> and getLastCheckPoint locates the most recent one (or None).
modelLabel = kwargs["name"]
restart = kwargs["restart"]
checkPointDir = os.path.join(kwargs["dir"], modelLabel)
checkPointData = getLastCheckPoint(checkPointDir, modelLabel)

if not os.path.isdir(checkPointDir):
    os.mkdir(checkPointDir)

# Load the base training configuration from the JSON file given in kwargs.
with open(kwargs["configPath"], 'rb') as file:
    trainingConfig = json.load(file)

trainingConfig['pathDB'] = '../../../data/'
trainingConfig['imagefolderDataset'] = True

# Model configuration: command-line/kwargs overrides win over the JSON config.
configOverride = getConfigOverrideFromParser(kwargs, trainerModule._defaultConfig)
modelConfig = trainingConfig.get("config", {})
for item, val in configOverride.items():
    modelConfig[item] = val
Example no. 2
0
    # Add overrides to the parser: changes to the model configuration can be
    # done via the command line
    parser = updateParserWithConfig(parser, trainerModule._defaultConfig)
    kwargs = vars(parser.parse_args())
    # Extract only the keys that override trainerModule's default config.
    configOverride = getConfigOverrideFromParser(
        kwargs, trainerModule._defaultConfig)

    # --overrides just prints the available override options and exits.
    if kwargs['overrides']:
        parser.print_help()
        sys.exit()

    # Checkpoint data: the run is identified by its name; checkpoints live in
    # <dir>/<name> and getLastCheckPoint locates the most recent one (or None).
    modelLabel = kwargs["name"]
    restart = kwargs["restart"]
    checkPointDir = os.path.join(kwargs["dir"], modelLabel)
    checkPointData = getLastCheckPoint(checkPointDir, modelLabel) # models/utils/utils.py: return
    # checkPointData = trainConfig, pathModel,

    if not os.path.isdir(checkPointDir):
        os.mkdir(checkPointDir)

    # Training configuration: a JSON config file is mandatory.
    configPath = kwargs.get("configPath", None)
    if configPath is None:
        # BUG FIX: corrected typo in the user-facing message
        # ("configuratrion" -> "configuration").
        raise ValueError("You need to input a configuration file")

    with open(kwargs["configPath"], 'rb') as file:
        trainingConfig = json.load(file)

    # Model configuration (nested "config" section; empty dict if absent).
    modelConfig = trainingConfig.get("config", {})