Code example #1
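All five scripts below come without their imports. A minimal import sketch, assuming the module layout of the LipMIP-style experiment codebase these drivers appear to target (the exact paths, the OTHER_METHODS aggregate list, and SCHEDULE_DIR are assumptions; adjust them to your checkout):

import numpy as np

# Assumed module layout -- adjust to your repository
from hyperbox import Hyperbox                    # input domains / linf balls
from relu_nets import ReLUNet                    # feed-forward ReLU networks
import neural_nets.data_loaders as dl            # synthetic + MNIST loaders
import neural_nets.train as train                # losses, regularizers, training loop
from lipMIP import LipMIP                        # exact MIP-based Lipschitz estimator
from other_methods import (FastLip, LipLP, SeqLip, LipSDP,
                           NaiveUB, RandomLB, OTHER_METHODS)
from experiment import Experiment, MethodNest, Job, DoEvery
from utilities import Factory

SCHEDULE_DIR = 'jobs/scheduled'  # hypothetical directory that Job.write() targets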
def main():

	# --- CHANGE THESE
	NAME = None
	C_VECTOR = None
	DIMENSION = None
	RADIUS = None
	RANDOM_SEED = None
	NUM_EPOCHS = None
	FREQUENCY = None
	LAYER_SIZES = None
	LOCAL_METHODS = [FastLip, LipLP, LipMIP]
	# OTHER_METHODS is assumed to be the aggregate method list exported by
	# the other_methods package (see the import sketch above)
	GLOBAL_METHODS = OTHER_METHODS + [LipMIP]

	NUM_RANDOM = None # Number of points in exp.do_random_evals
	NUM_DATA = None   # Number of points in exp.do_data_evals

	assert all(_ is not None for _ in [NAME, C_VECTOR, DIMENSION, RADIUS,
									   RANDOM_SEED, NUM_EPOCHS, FREQUENCY,
									   LAYER_SIZES, NUM_RANDOM, NUM_DATA])

	# --- THESE CAN REMAIN AS IS
	exp_kwargs = {'c_vector': C_VECTOR, 'primal_norm': 'linf'}
	GLOBAL_LO = np.zeros(DIMENSION)
	GLOBAL_HI = np.ones(DIMENSION)
	DOMAIN = Hyperbox.build_unit_hypercube(DIMENSION)
	BALL_FACTORY = Factory(Hyperbox.build_linf_ball, radius=RADIUS)

	# ================================================================
	# =           Data Parameters Setup                              =
	# ================================================================
	# Make both the training/validation sets
	data_params = dl.RandomKParameters(num_points=300, k=10, radius=0.01,
									   dimension=DIMENSION)
	dataset = dl.RandomDataset(data_params, batch_size=128,
							   random_seed=RANDOM_SEED)
	train_set, _ = dataset.split_train_val(1.0)

	# Make the data arg_bundle object
	loader_kwargs = {'batch_size': NUM_DATA, 'random_seed': RANDOM_SEED}
	data_arg_bundle = {'data_type': 'synthetic',
					   'params': data_params,
					   'loader_kwargs': loader_kwargs,
					   'ball_factory': BALL_FACTORY}

	# ================================================================
	# =           Training Parameter Setup                           =
	# ================================================================
	# Build the loss functional and set the optimizer
	xentropy = train.XEntropyReg()
	l2_penalty = train.LpWeightReg(scalar=1e-2, lp='l2')
	loss_functional = train.LossFunctional(regularizers=[xentropy, l2_penalty])
	train_params = train.TrainParameters(train_set, train_set, NUM_EPOCHS,
										 loss_functional=loss_functional,
										 test_after_epoch=20)
	# Build the base network architecture
	network = ReLUNet(layer_sizes=LAYER_SIZES)


	# ================================================================
	# =           Build the Experiment objects                       =
	# ================================================================

	local_exp = Experiment(LOCAL_METHODS, network=network, **exp_kwargs)
	global_exp = Experiment(GLOBAL_METHODS, network=network, **exp_kwargs)

	# ================================================================
	# =           Build the methodNests                              =
	# ================================================================

	# --- randomly evaluated method nest
	random_nest = MethodNest(Experiment.do_random_evals,
							 {'sample_domain': DOMAIN,
							  'ball_factory': BALL_FACTORY,
							  'num_random_points': NUM_RANDOM})

	# --- data-based method nest
	data_nest = MethodNest(Experiment.do_data_evals, data_arg_bundle)


	# --- hypercube stuff
	cube_nest = MethodNest(Experiment.do_unit_hypercube_eval)

	local_nests = [random_nest, data_nest]
	global_nests = [cube_nest]


	def build_jobs(epoch_no, network=None):
		local_job_name = '%s_EPOCH%04d_LOCAL' % (NAME, epoch_no)
		local_job = Job(local_job_name, local_exp, local_nests,
						save_loc=SCHEDULE_DIR)
		local_job.write()


		global_job_name = '%s_EPOCH%04d_GLOBAL' % (NAME, epoch_no)
		global_job = Job(global_job_name, global_exp, global_nests,
						 save_loc=SCHEDULE_DIR)
		global_job.write()

	job_do_every = DoEvery(build_jobs, FREQUENCY)

	# ==============================================================
	# =           Train the network                                =
	# ==============================================================

	train.training_loop(network, train_params, epoch_callback=job_do_every)
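Example 1 is a fill-in-the-blanks template. A hypothetical instantiation of its CHANGE THESE block, with values borrowed from the concrete examples further down (illustrative only, not the authors' settings):

NAME = 'demoRun'                  # hypothetical name
C_VECTOR = np.array([1.0, -1.0])  # objective used in examples 2-4
DIMENSION = 2
RADIUS = 0.1
RANDOM_SEED = 420
NUM_EPOCHS = 500
FREQUENCY = 100                   # write jobs every 100 epochs
LAYER_SIZES = [2, 20, 40, 20, 2]  # architecture from example 4
NUM_RANDOM = 20
NUM_DATA = 20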
Code example #2
def main():
    NAME = 'WideSeq'
    exp_kwargs = {'c_vector': np.array([1.0, -1.0]), 'primal_norm': 'linf'}
    DIMENSION = 2
    RADIUS = 0.1
    GLOBAL_LO = np.zeros(DIMENSION)
    GLOBAL_HI = np.ones(DIMENSION)
    RANDOM_SEED = 420
    DOMAIN = Hyperbox.build_unit_hypercube(DIMENSION)
    BALL_FACTORY = Factory(Hyperbox.build_linf_ball, radius=RADIUS)
    LAYER_SEQ = [([2, 20] + [20 * i] + [20, 2]) for i in range(1, 6)]
    # i.e., [2, 20, 20, 20, 2] up to [2, 20, 100, 20, 2]: middle widths 20..100

    def NAME_FXN(network):
        """ Returns a string based on the network """
        width = network.layer_sizes[2]
        return '%s_WIDTH%04d' % (NAME, width)

    # ================================================================
    # =           Data Parameters Setup                              =
    # ================================================================
    # Make both the training/validation sets
    data_params = dl.RandomKParameters(num_points=300,
                                       k=10,
                                       radius=0.01,
                                       dimension=DIMENSION)
    dataset = dl.RandomDataset(data_params,
                               batch_size=128,
                               random_seed=RANDOM_SEED)
    train_set, _ = dataset.split_train_val(1.0)

    # Make the data arg_bundle object
    loader_kwargs = {'batch_size': 100, 'random_seed': RANDOM_SEED}
    data_arg_bundle = {
        'data_type': 'synthetic',
        'params': data_params,
        'loader_kwargs': loader_kwargs,
        'ball_factory': BALL_FACTORY
    }

    # ================================================================
    # =           Training Parameter Setup                           =
    # ================================================================

    # Build the loss functional and set the optimizer
    xentropy = train.XEntropyReg()
    l2_penalty = train.LpWeightReg(scalar=1e-4, lp='l2')  # defined but unused here
    loss_functional = train.LossFunctional(regularizers=[xentropy])
    train_params = train.TrainParameters(train_set,
                                         train_set,
                                         500,
                                         loss_functional=loss_functional,
                                         test_after_epoch=50)

    # ================================================================
    # =           Build the methodNests                              =
    # ================================================================

    # --- randomly evaluated method nest
    random_nest = MethodNest(
        Experiment.do_random_evals, {
            'sample_domain': DOMAIN,
            'ball_factory': BALL_FACTORY,
            'num_random_points': 20
        })

    # --- data-based method nest
    data_nest = MethodNest(Experiment.do_data_evals, data_arg_bundle)

    # --- hypercube stuff
    cube_nest = MethodNest(Experiment.do_unit_hypercube_eval)

    local_nests = [random_nest, data_nest, cube_nest]
    global_nests = [cube_nest]

    def build_jobs(network, **exp_kwargs):
        local_exp = Experiment([FastLip, LipLP, LipMIP],
                               network=network,
                               **exp_kwargs)
        global_exp = Experiment(
            [LipMIP, FastLip, LipLP, SeqLip, LipSDP, NaiveUB],
            network=network,
            **exp_kwargs)
        prefix = NAME_FXN(network)
        #prefix = '%s_RELUS%02d' % (NAME, network.num_relus)
        local_job_name = prefix + "_LOCAL"
        local_job = Job(local_job_name,
                        local_exp,
                        local_nests,
                        save_loc=SCHEDULE_DIR)
        global_job_name = prefix + "_GLOBAL"
        global_job = Job(global_job_name,
                         global_exp,
                         global_nests,
                         save_loc=SCHEDULE_DIR)
        local_job.write()
        global_job.write()

    # ==============================================================
    # =           Train the networks                               =
    # ==============================================================

    for layer_size in LAYER_SEQ:
        print("Starting training:", layer_size)
        network = ReLUNet(layer_sizes=layer_size)
        train.training_loop(network, train_params)
        build_jobs(network, **exp_kwargs)
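Two pieces of plumbing every script leans on are Factory (pre-binds the radius of Hyperbox.build_linf_ball) and DoEvery (wraps build_jobs as an epoch_callback in examples 1, 4 and 5). Minimal stand-ins, assuming Factory is essentially functools.partial and that the training loop invokes the callback as callback(epoch_no, network=network); the real classes in the codebase may differ:

import functools

class Factory:
    # Pre-binds keyword arguments; a call forwards any remaining ones
    def __init__(self, fxn, **kwargs):
        self.fxn = functools.partial(fxn, **kwargs)

    def __call__(self, *args, **kwargs):
        return self.fxn(*args, **kwargs)

class DoEvery:
    # Runs the wrapped function once every `freq` epochs
    def __init__(self, fxn, freq):
        self.fxn, self.freq = fxn, freq

    def __call__(self, epoch_no, network=None):
        if epoch_no % self.freq == 0:
            self.fxn(epoch_no, network=network)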
Code example #3
def main():
	# ==========================================================
	# =           SETUP -- CHANGE THESE VARIABLES!!!           =
	# ==========================================================
	NAME = 'kdData'
	C_VECTOR = np.array([1.0, -1.0])
	PRIMAL_NORM = 'linf'
	RADIUS = 0.2
	RANDOM_SEED = 420
	LAYER_SIZES = [20, 40, 80, 2]
	DIMENSION_SERIES = [2, 4, 8, 16]
	NUM_POINT_SERIES = [250, 1000, 2000, 4000]
	LOCAL_METHODS = [FastLip, LipLP, LipMIP]
	GLOBAL_METHODS = [LipMIP, FastLip, LipLP, SeqLip, LipSDP, 
					  NaiveUB, RandomLB]

	exp_kwargs = {'c_vector': C_VECTOR,
				  'primal_norm': PRIMAL_NORM}
	# ==========================================================
	# =           HELPER SETUP -- THESE CAN BE LEFT ALONE      =
	# ==========================================================
	BALL_FACTORY = Factory(Hyperbox.build_linf_ball, radius=RADIUS)
	def NAME_FXN(network):
		""" Returns a string based on the network """
		return '%s_DIM%04d' % (NAME, network.layer_sizes[0])

	def build_jobs(network, **exp_kwargs):
		# NOTE: local_nests / global_nests are captured by closure; they are
		# defined inside the dimension loop below before this is first called.
		# Use the method lists defined at the top (GLOBAL_METHODS also
		# includes RandomLB, which the original hardcoded list dropped).
		local_exp = Experiment(LOCAL_METHODS,
							   network=network, **exp_kwargs)
		global_exp = Experiment(GLOBAL_METHODS,
								network=network, **exp_kwargs)
		prefix = NAME_FXN(network)
		#prefix = '%s_RELUS%02d' % (NAME, network.num_relus)
		local_job_name = prefix + "_LOCAL"
		local_job = Job(local_job_name, local_exp, local_nests,
						save_loc=SCHEDULE_DIR)
		global_job_name = prefix + "_GLOBAL"
		global_job = Job(global_job_name, global_exp, global_nests,
						 save_loc=SCHEDULE_DIR)
		local_job.write()		
		global_job.write()


	# =======================================================
	# =           LOOP OVER DIMENSIONS WE ALLOW             =
	# =======================================================	
	for DIMENSION, NUM_POINTS in zip(DIMENSION_SERIES, NUM_POINT_SERIES):

		# -----------  Build dataset   -----------


		DOMAIN = Hyperbox.build_unit_hypercube(DIMENSION)
		data_params = dl.RandomKParameters(num_points=NUM_POINTS, k=5 * DIMENSION,
										   radius=0.01 * DIMENSION, dimension=DIMENSION)
		dataset = dl.RandomDataset(data_params, batch_size=128, 
										 random_seed=RANDOM_SEED)
		train_set, _ = dataset.split_train_val(1.0)

		# Make the data arg_bundle object
		loader_kwargs = {'batch_size': 100, 'random_seed': RANDOM_SEED}
		data_arg_bundle = {'data_type': 'synthetic', 
						   'params': data_params,
						   'loader_kwargs': loader_kwargs,
						   'ball_factory': BALL_FACTORY}


		# -----------  Build training parameters      -----------
		# Build the loss functional and set the optimizer 
		xentropy = train.XEntropyReg()
		l2_penalty = train.LpWeightReg(scalar=1e-4, lp='l2')
		loss_functional = train.LossFunctional(regularizers=[xentropy])
		train_params = train.TrainParameters(train_set, train_set, 1000, 
											 loss_functional=loss_functional,
											 test_after_epoch=50)


		# -----------  Build MethodNests              -----------
		# --- randomly evaluated method nest
		random_nest = MethodNest(Experiment.do_random_evals,
								 {'sample_domain': DOMAIN,
								  'ball_factory': BALL_FACTORY,
								  'num_random_points': 20})

		# --- data-based method nest 
		data_nest = MethodNest(Experiment.do_data_evals, data_arg_bundle)


		# --- hypercube stuff 
		cube_nest = MethodNest(Experiment.do_unit_hypercube_eval)

		local_nests = [random_nest, data_nest, cube_nest]
		global_nests = [cube_nest]



		# -----------  Train the networks  -----------
		print("Starting training: DIMENSION ", DIMENSION)
		network = ReLUNet(layer_sizes=[DIMENSION] + LAYER_SIZES)
		train.training_loop(network, train_params)
		build_jobs(network, **exp_kwargs)
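For reference, the zip above visits (DIMENSION, NUM_POINTS) = (2, 250), (4, 1000), (8, 2000), (16, 4000), so the trained networks are [2, 20, 40, 80, 2] through [16, 20, 40, 80, 2] and the written jobs run from kdData_DIM0002_LOCAL/GLOBAL to kdData_DIM0016_LOCAL/GLOBAL.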
Code example #4
def main():
	# ==========================================================
	# =           SETUP -- CHANGE THESE VARIABLES!!!           =
	# ==========================================================
	# --- COMMON BLOCK (holds names, objectives, etc)
	NAME = 'REGBUNDLE' 		# Name for this experiment
	C_VECTOR = np.array([1.0, -1.0])
	PRIMAL_NORM = 'linf'
	DATA_PARAMS = {'num_points': 256,
				   'k': 10,
				   'radius': 0.02,
				   'num_classes': 2,
				   'dimension': 2}

	LAYER_SIZES = [2, 20, 40, 20, 2]  	# defines architecture of net to test


	# --- CONTROL BLOCK (holds training params, num_restarts, jobs at end?)
	REGULARIZER_SERIES = {'vanilla': [train.XEntropyReg()],
						  'l2Pen': [train.XEntropyReg(),
									train.LpWeightReg(scalar=1e-2, lp='l2')],
						  'l1Pen': [train.XEntropyReg(),
									train.LpWeightReg(scalar=1e-3, lp='l1')],
						  'FGSM':  [train.FGSM(0.1)]}
	NUM_RESTARTS = 5  	 	# how many times to restart training
	JOB_FREQUENCY = 100

	# --- EXPERIMENTAL PARAM BLOCK (holds ball_factory, which methods to eval)
	RADIUS = 0.1        # Ball factory radius for random/data evals
	RANDOM_SEED = 420    # Dataset random seed
	NUM_EXP_RANDOM = 20 # num of random points to test in experiments
	NUM_EXP_DATA   = 20 # num of data points to test in experiments
	LOCAL_METHODS = [FastLip, LipLP, LipMIP] #Methods to do random/data
	GLOBAL_METHODS = [LipMIP, FastLip, LipLP, SeqLip, LipSDP,
					  NaiveUB, RandomLB]  # Methods to do unit hcube


	# -- COMPUTED HELPER BLOCK
	exp_kwargs = {'c_vector': C_VECTOR,
				  'primal_norm': PRIMAL_NORM}	
	DOMAIN = Hyperbox.build_unit_hypercube(DATA_PARAMS['dimension'])
	BALL_FACTORY = Factory(Hyperbox.build_linf_ball, radius=RADIUS)
	# ================================================================
	# =           Data Parameters Setup                              =
	# ================================================================
	# Make both the training/validation sets 
	data_params = dl.RandomKParameters(**DATA_PARAMS)
	dataset = dl.RandomDataset(data_params, batch_size=128, 
							   random_seed=RANDOM_SEED)
	train_set, _ = dataset.split_train_val(1.0)

	# Make the data arg_bundle object
	loader_kwargs = {'batch_size': NUM_EXP_DATA, 
					 'random_seed': RANDOM_SEED}
	data_arg_bundle = {'data_type': 'synthetic', 
					   'params': data_params,
					   'loader_kwargs': loader_kwargs,
					   'ball_factory': BALL_FACTORY}

	# ================================================================
	# =           Build the methodNests                              =
	# ================================================================

	# --- randomly evaluated method nest
	random_nest = MethodNest(Experiment.do_random_evals,
							 {'sample_domain': DOMAIN,
							  'ball_factory': BALL_FACTORY,
							  'num_random_points': NUM_EXP_RANDOM})

	# --- data-based method nest 
	data_nest = MethodNest(Experiment.do_data_evals, data_arg_bundle)


	# --- hypercube stuff 
	cube_nest = MethodNest(Experiment.do_unit_hypercube_eval)

	local_nests = [random_nest, data_nest]
	global_nests = [cube_nest]

	def build_callback_and_final(reg_name):
		# Builds the epoch_callback, final call

		def build_jobs(epoch_no, network=None, NAME=NAME, reg_name=reg_name):
			prefix = '%s_REG%s_EPOCH%04d' % (NAME, reg_name, epoch_no)
			local_exp = Experiment(LOCAL_METHODS, network=network, **exp_kwargs)
			global_exp = Experiment(GLOBAL_METHODS, network=network, **exp_kwargs)

			local_job = Job('%s_LOCAL' % prefix, local_exp, local_nests,
							save_loc=SCHEDULE_DIR)
			global_job = Job('%s_GLOBAL' % prefix, global_exp, global_nests,
							 save_loc=SCHEDULE_DIR)

			local_job.write()
			global_job.write()



		if JOB_FREQUENCY is None:
			return None, build_jobs
		else:
			return DoEvery(build_jobs, JOB_FREQUENCY), build_jobs

	# ==============================================================
	# =           Train the networks                               =
	# ==============================================================
	for reg_name, regularizers in REGULARIZER_SERIES.items():
		print('-' * 30, "TRAINING --", reg_name)
		# First build job builder:
		callback, final = build_callback_and_final(reg_name)

		# Then train function
		loss_functional = train.LossFunctional(regularizers=regularizers)
		train_params = train.TrainParameters(train_set, train_set, 500,
											 loss_functional=loss_functional,
											 test_after_epoch=100)
		network = ReLUNet(layer_sizes=LAYER_SIZES)

		train.best_of_k(network, train_params, NUM_RESTARTS,
						epoch_callback=callback)
		# Finally call the final fxn
		final(epoch_no=train_params.num_epochs, network=network)
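With NAME = 'REGBUNDLE', JOB_FREQUENCY = 100 and 500 training epochs, each regularizer series periodically writes job pairs named like REGBUNDLE_REGvanilla_EPOCH0100_LOCAL / _GLOBAL, and the explicit final(...) call writes one last pair at epoch 500; if JOB_FREQUENCY were None, only that final pair would be written.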
Code example #5
def main():
	NAME = None
	LAYER_SIZES = None
	C_VECTOR = None # list of digits or the string 'crossLipschitz'
	RANDOM_SEED = None
	RADIUS = None
	MNIST_DIGITS = None  # may stay None; deliberately absent from the assert below
	FREQUENCY = None
	EPOCHS = None
	# FREQUENCY may also stay None -- that case is handled explicitly below
	assert all([_ is not None for _ in [NAME, LAYER_SIZES, C_VECTOR,
										RANDOM_SEED, RADIUS, EPOCHS]])

	exp_kwargs = {'c_vector': C_VECTOR,
				  'primal_norm': 'linf'}
	DIMENSION = 784
	GLOBAL_LO = np.zeros(DIMENSION)
	GLOBAL_HI = np.ones(DIMENSION)
	DOMAIN = Hyperbox.build_unit_hypercube(DIMENSION)
	BALL_FACTORY = Factory(Hyperbox.build_linf_ball, radius=RADIUS)
	NAMER = lambda epoch_no: '%s_EPOCH%04d' % (NAME, epoch_no)
	# ================================================================
	# =           Data Parameters Setup                              =
	# ================================================================
	# Make both the training/validation sets 

	train_set = dl.load_mnist_data('train', digits=MNIST_DIGITS)
	val_set = dl.load_mnist_data('val', digits=MNIST_DIGITS)

	# Make the data arg_bundle object
	loader_kwargs = {'batch_size': 100, 'digits': MNIST_DIGITS,
					 'shuffle': True}
	train_arg_bundle = {'data_type': 'MNIST',
		 			    'loader_kwargs': loader_kwargs,
		 			    'ball_factory': BALL_FACTORY,
		 			    'train_or_val': 'train'}
	val_arg_bundle = {'data_type': 'MNIST',
 	 			      'loader_kwargs': loader_kwargs,
		 			  'ball_factory': BALL_FACTORY,
		 			  'train_or_val': 'val'}

	# ================================================================
	# =           Training Parameter Setup                           =
	# ================================================================

	# Build the loss functional and set the optimizer 
	xentropy = train.XEntropyReg()
	l2_penalty = train.LpWeightReg(scalar=1e-2, lp='l2')  # defined but unused here
	loss_functional = train.LossFunctional(regularizers=[xentropy])
	# validate on the held-out MNIST split (val_set was otherwise unused)
	train_params = train.TrainParameters(train_set, val_set, EPOCHS,
										 loss_functional=loss_functional,
										 test_after_epoch=20)
	# Build the base network architecture
	network = ReLUNet(layer_sizes=LAYER_SIZES)


	# ================================================================
	# =           Build the Experiment objects                       =
	# ================================================================

	local_exp = Experiment([FastLip, LipLP, LipMIP], network=network,
						   **exp_kwargs)
	# GLOBAL_METHODS was never defined in this script; the list below mirrors
	# the global-method list used in the other examples
	global_exp = Experiment([LipMIP, FastLip, LipLP, SeqLip, LipSDP,
							 NaiveUB, RandomLB], network=network, **exp_kwargs)

	# ================================================================
	# =           Build the methodNests                              =
	# ================================================================

	# --- randomly evaluated method nest
	random_nest = MethodNest(Experiment.do_random_evals,
							 {'sample_domain': DOMAIN,
							  'ball_factory': BALL_FACTORY,
							  'num_random_points': 20})

	# --- data-based method nest 
	train_nest = MethodNest(Experiment.do_data_evals, train_arg_bundle)
	val_nest = MethodNest(Experiment.do_data_evals, val_arg_bundle)


	# --- hypercube stuff 
	cube_nest = MethodNest(Experiment.do_unit_hypercube_eval)

	local_nests = [random_nest, train_nest, val_nest, cube_nest]
	global_nests = [cube_nest]


	def build_jobs(epoch_no, network=None):
		local_job_name = NAMER(epoch_no) + '_LOCAL'
		local_job = Job(local_job_name, local_exp, local_nests,
						save_loc=SCHEDULE_DIR)
		local_job.write()

		global_job_name = NAMER(epoch_no) + '_GLOBAL'
		global_job = Job(global_job_name, global_exp, global_nests,
						 save_loc=SCHEDULE_DIR)
		global_job.write()

	if FREQUENCY is None:
		job_do_every = None
	else:
		job_do_every = DoEvery(build_jobs, FREQUENCY)

	# ==============================================================
	# =           Train the network                                =
	# ==============================================================

	train.training_loop(network, train_params, epoch_callback=job_do_every)
	if FREQUENCY is None:
		build_jobs(EPOCHS)
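Like example 1, this MNIST driver is a template. A hypothetical instantiation (illustrative values only; per the comment in the template, C_VECTOR may instead be the string 'crossLipschitz'):

NAME = 'mnistDemo'               # hypothetical name
LAYER_SIZES = [784, 128, 64, 2]  # hypothetical architecture; input must be 784
C_VECTOR = np.array([1.0, -1.0])
RANDOM_SEED = 420
RADIUS = 0.1
MNIST_DIGITS = [1, 7]            # hypothetical binary digit pair
FREQUENCY = 100
EPOCHS = 500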