def __init__(self, config, model_details, frac_feas):
        self.config = config
        self.numpy_graph = NumpyGraph(self.config, model_details)
        self.frac_feas = frac_feas
        # set logger verbosity
        Logger.__init__(self, 'TfprobNetwork', verbosity=self.config.get('verbosity'))

        # model hyperparams
        self.num_epochs = model_details['num_epochs']
        self.learning_rate = model_details['learning_rate']
        self.num_draws = model_details['num_draws']
        self.num_layers = model_details['num_layers']
        self.hidden_shape = model_details['hidden_shape']
        self.weight_loc = model_details['weight_loc']
        self.weight_scale = model_details['weight_scale']
        self.bias_loc = model_details['bias_loc']
        self.bias_scale = model_details['bias_scale']

        self.feature_size = len(self.config.kernel_names)
        self.bnn_output_size = len(self.config.kernel_names)
        self.target_size = len(self.config.kernel_names)

        self.trace = {}

        self.graph = tf.Graph()
        with self.graph.as_default():
            self.sess = tf.compat.v1.InteractiveSession()
Example #2
    def __init__(self, config):
        self.config = config
        Logger.__init__(self,
                        'CategoryReshaper',
                        verbosity=self.config.get('verbosity'))

        self.kernel_reshaper = KernelReshaper()
Example #3
    def __init__(self, config, constraints=None):
        """
        constraints : list or None
            List of callables used as constraint functions. Each takes a parameter dict, e.g.
            {'x0': 0.1, 'x1': 10, 'x2': 'A'}, and returns a bool indicating
            whether the point is in the feasible region.
        """
        self.config = config
        self.verbosity = self.config.get('verbosity')
        Logger.__init__(self, 'GeneticOptimizer', verbosity=self.verbosity)

        # if constraints is not None and not already a list, wrap it in a list
        if constraints is not None and not isinstance(constraints, list):
            self.constraints = [constraints]
        else:
            self.constraints = constraints

        # define which single-step optimization function to use
        if self.constraints is None:
            self._one_step_evolution = self._evolution
        else:
            self._one_step_evolution = self._constrained_evolution

        # range of opt domain dimensions
        self.param_ranges = self.config.param_uppers - self.config.param_lowers
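For reference, a minimal sketch of a constraint callable matching the signature described in the docstring (the parameter names and threshold here are hypothetical):

    # hypothetical constraint: reject points where x0 and x1 are both large
    def my_constraint(params):
        return not (params['x0'] > 0.8 and params['x1'] > 8)

    optimizer = GeneticOptimizer(config, constraints=my_constraint)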
Example #4
    def __init__(self, config, all_options=None):
        self.config = config
        # factor modulating the density-based penalty in sample selector
        self.dist_param = self.config.get('dist_param')

        self.all_options = all_options

        self.verbosity = self.config.get('verbosity')
        Logger.__init__(self, 'SampleSelector', verbosity=self.verbosity)
        # figure out how many CPUs to use
        if self.config.get('num_cpus') == 'all':
            self.num_cpus = multiprocessing.cpu_count()
        else:
            self.num_cpus = int(self.config.get('num_cpus'))

        # check to see what kind of problem we have: fully continuous,
        # mixed categorical continuous/discrete, fully discrete or fully categorical
        param_types = [param['type'] for param in self.config.parameters]
        unique_param_types = sorted(set(param_types))

        if unique_param_types == ['continuous'] or unique_param_types == ['continuous', 'discrete']:
            self.problem_type = 'fully_continuous'
        elif unique_param_types == ['categorical']:
            self.problem_type = 'fully_categorical'
        elif unique_param_types == ['discrete']:
            self.problem_type = 'fully_discrete'
        elif unique_param_types == ['categorical', 'continuous'] or unique_param_types == ['categorical', 'continuous', 'discrete']:
            self.problem_type = 'mixed_continuous'
        elif unique_param_types == ['categorical', 'discrete']:
            self.problem_type = 'mixed_discrete'
        else:
            raise GryffinNotFoundError('could not determine problem type from parameter types: %s' % unique_param_types)

        # get the non-categorical indices of the params space
        self.non_cat_param_idx = np.array([i for i, param in enumerate(param_types) if param != 'categorical'])
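The problem-type dispatch above compares the sorted set of parameter types against fixed lists; a quick illustration with a made-up parameter list:

    param_types = ['continuous', 'categorical', 'continuous']
    unique_param_types = sorted(set(param_types))  # ['categorical', 'continuous']
    # matches the ['categorical', 'continuous'] branch, so problem_type = 'mixed_continuous'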
Example #5
    def __init__(self, config, known_constraints=None):

        self.config = config
        self.known_constraints = known_constraints

        self.verbosity = self.config.get('verbosity')
        Logger.__init__(self, 'Acquisition', verbosity=self.verbosity)

        self.total_num_vars = len(self.config.feature_names)
        self.optimizer_type = self.config.get('acquisition_optimizer')

        self.bayesian_network = None
        self.local_optimizers = None
        self.sampling_param_values = None
        self.frac_infeasible = None
        self.acqs_min_max = None  # dict mapping batch_index to [min, max]
        self.acquisition_functions = {}  # keeps the AcquisitionFunction instances used

        # figure out how many CPUs to use
        if self.config.get('num_cpus') == 'all':
            self.num_cpus = multiprocessing.cpu_count()
        else:
            self.num_cpus = int(self.config.get('num_cpus'))

        # get feasibility approach and sensitivity parameter and do some checks
        self.feas_approach = self.config.get('feas_approach')
        self.feas_param = self.config.get('feas_param')
        self._check_feas_options()
Example #6
    def __init__(self, config, constraints=None):
        """
        constraints : list or None
            List of callables used as constraint functions. Each takes a parameter dict, e.g.
            {'x0': 0.1, 'x1': 10, 'x2': 'A'}, and returns a bool indicating
            whether the point is in the feasible region.
        """
        self.config = config
        Logger.__init__(self, 'GradientOptimizer', verbosity=self.config.get('verbosity'))

        # if constraints is not None and not already a list, wrap it in a list
        if constraints is not None and not isinstance(constraints, list):
            self.constraints = [constraints]
        else:
            self.constraints = constraints

        # define which single-step optimization function to use
        if constraints is None:
            self._optimize_one_sample = self._optimize_sample
        else:
            self._optimize_one_sample = self._constrained_optimize_sample

        # parse positions
        self.pos_continuous = np.array([f == 'continuous' for f in self.config.feature_types])
        self.pos_categories = np.array([f == 'categorical' for f in self.config.feature_types])
        self.pos_discrete = np.array([f == 'discrete' for f in self.config.feature_types])
        # sanity check: every feature is exactly one of the three types
        assert sum(self.pos_continuous) + sum(self.pos_categories) + sum(self.pos_discrete) == self.config.num_features

        # instantiate optimizers for all variable types
        self.opt_con = AdamOptimizer()
        self.opt_dis = NaiveDiscreteOptimizer()
        self.opt_cat = NaiveCategoricalOptimizer()
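The three position masks partition the feature vector by variable type; a quick check with a hypothetical feature_types list:

    feature_types = ['continuous', 'categorical', 'discrete', 'continuous']
    pos_continuous = np.array([f == 'continuous' for f in feature_types])
    # array([ True, False, False,  True]); the three masks together cover every feature exactly once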
Example #7
	def __init__(self, path, attributes, name = 'table', verbosity = 0):

		self.WRITING_REQUESTS = []
		self.READING_REQUESTS = {}
		self.UPDATE_REQUESTS  = []
		Logger.__init__(self, 'SQLite interface', verbosity = verbosity)

		self.db_path              = 'sqlite:///%s/search_progress.db' % path
		self.attributes           = attributes
		self.name                 = name
		
		self.log('creating database %s at %s' % (self.name, self.db_path), 'DEBUG')

		# create database 
		self.db       = sql.create_engine(self.db_path)
		self.db.echo  = False		
		self.metadata = sql.MetaData(self.db)

		# create table in database
		self.table = sql.Table(self.name, self.metadata)
		for att_name, att_type in self.attributes.items():
			self.table.append_column(sql.Column(att_name, self.SQLITE_COLUMNS[att_type]))
		self.table.create(checkfirst = True)

		# start request processor
		self._process_requests()
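This snippet relies on pre-1.4 SQLAlchemy idioms (a MetaData bound to an engine, Table.create). A minimal standalone sketch of the same pattern, assuming an in-memory database and a single integer column:

    import sqlalchemy as sql

    db = sql.create_engine('sqlite:///:memory:')
    metadata = sql.MetaData(db)  # bound MetaData, valid in SQLAlchemy < 1.4
    table = sql.Table('progress', metadata,
                      sql.Column('value', sql.Integer))
    table.create(checkfirst=True)  # no-op if the table already exists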
Example #8
    def __init__(self, config):
        self.config = config
        Logger.__init__(self,
                        'ParamOptimizer',
                        verbosity=self.config.get('verbosity'))

        # parse positions
        self.pos_continuous = np.full(self.config.num_features,
                                      False,
                                      dtype=bool)
        self.pos_categories = np.full(self.config.num_features,
                                      False,
                                      dtype=bool)
        self.pos_discrete = np.full(self.config.num_features,
                                    False,
                                    dtype=bool)
        for feature_index, feature_type in enumerate(
                self.config.feature_types):
            if feature_type == 'continuous':
                self.pos_continuous[feature_index] = True
            elif feature_type == 'categorical':
                self.pos_categories[feature_index] = True
            elif feature_type == 'discrete':
                self.pos_discrete[feature_index] = True
            else:
                feature_name = self.config.feature_names[feature_index]
                raise GryffinUnknownSettingsError(
                    'did not understand parameter type "%s" for parameter "%s".\n\t(%s) Please choose from "continuous", "discrete" or "categorical"'
                    % (feature_type, feature_name, self.template))

        # set up continuous optimization algorithms
        cont_opt_name = self.config.get('continuous_optimizer')
        if cont_opt_name == 'adam':
            from .numpy_optimizers import AdamOptimizer
            self.opt_con = AdamOptimizer()
        else:
            raise GryffinUnknownSettingsError(
                'did not understand continuous optimizer "%s".\n\tPlease choose from "adam"'
                % cont_opt_name)

        # set up discrete optimization algorithms
        disc_opt_name = self.config.get('discrete_optimizer')
        if disc_opt_name == 'naive':
            from .numpy_optimizers import NaiveDiscreteOptimizer
            self.opt_dis = NaiveDiscreteOptimizer()
        else:
            raise GryffinUnknownSettingsError(
                'did not understand discrete optimizer "%s".\n\tPlease choose from "naive"'
                % disc_opt_name)

        # set up categorical optimization algorithms
        cat_opt_name = self.config.get('categorical_optimizer')
        if cat_opt_name == 'naive':
            from .numpy_optimizers import NaiveCategoricalOptimizer
            self.opt_cat = NaiveCategoricalOptimizer()
        else:
            raise GryffinUnknownSettingsError(
                'did not understand categorical optimizer "%s".\n\tPlease choose from "naive"'
                % cat_opt_name)
Example #9
    def __init__(self, attributes, entries=None, verbosity=0):
        Logger.__init__(self, 'DB_Cache', verbosity=verbosity)
        self.attributes = attributes

        self.cache = {attr: [] for attr in self.attributes}
        self.num_items = 0
        for entry in entries or []:  # entries defaults to None to avoid a mutable default argument
            self.add(entry)
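A hypothetical instantiation (the attribute names and entry layout are invented for illustration; each entry is simply handed to self.add):

    cache = DB_Cache(attributes=['params', 'objs'],
                     entries=[{'params': [0.1, 0.2], 'objs': [1.5]}])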
Example #10
    def __init__(self, config):

        self.config = config
        Logger.__init__(self, 'Acquisition', self.config.get('verbosity'))
        self.random_sampler   = RandomSampler(self.config.general, self.config.parameters)
        self.total_num_vars   = len(self.config.feature_names)
        self.local_optimizers = None
        self.num_cpus         = multiprocessing.cpu_count()
Example #11
    def __init__(self, config, frac_feas, model_details=None):

        self.config = config
        self.frac_feas = frac_feas

        # get domain volume
        self.volume = None
        self.inverse_volume = None
        self._get_volume()

        # variables created after sample/build_kernels
        self.obs_objs_kwn = None
        self.obs_objs_feas = None
        self.kernel_regression = None
        self.kernel_classification = None

        # variables for kernel density classification
        self.prior_0 = 1  # default prior is all feasible
        self.prior_1 = 0
        self.log_prior_0 = None
        self.log_prior_1 = None

        # variables for kernel density estimation and regression
        self.trace_kernels = None
        self.cat_reshaper = CategoryReshaper(self.config)

        # get kernel types and sizes
        self.kernel_types, self.kernel_sizes, self.kernel_ranges = self._get_kernel_types_and_sizes(self.config)

        # verbosity settings
        self.verbosity = self.config.get('verbosity')
        Logger.__init__(self, 'BayesianNetwork', verbosity=self.verbosity)

        # get bnn model details
        if model_details is None:
            self.model_details = self.config.model_details.to_dict()
        else:
            self.model_details = model_details

        # whether to use boosting, i.e. get lower prob bound
        if self.config.get('boosted'):
            self.lower_prob_bound = 1e-1
            for size in self.config.feature_ranges:
                self.lower_prob_bound *= 1. / size
        else:
            self.lower_prob_bound = 1e-25

        # whether to use kernel caching
        self.caching = self.config.get('caching')
        self.cache = None
        if self.caching is True:
            # caching is possible only if all kernel types are categorical (i.e. all type 2)
            if np.all(self.kernel_types > 1.5):
                self.cache = {}
            else:
                self.caching = False
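With boosting enabled, the lower probability bound scales as 0.1 times the inverse of the domain volume; e.g. for hypothetical feature ranges of 10 and 5:

    lower_prob_bound = 1e-1
    for size in [10., 5.]:
        lower_prob_bound *= 1. / size
    # 1e-1 * (1/10) * (1/5) = 2e-3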
Example #12
    def __init__(self, config):
        self.config = config
        DB_Werkzeug.__init__(self,
                             self.config,
                             self.DB_ATTRIBUTES,
                             verbosity=self.config.get('verbosity'))
        Logger.__init__(self, 'DatabaseHandler', self.config.get('verbosity'))

        self.create_database()
        self.create_cache()
Example #13
    def __init__(self, config):
        self.config = config
        Logger.__init__(self,
                        'SampleSelector',
                        verbosity=self.config.get('verbosity'))
        # figure out how many CPUs to use
        if self.config.get('num_cpus') == 'all':
            self.num_cpus = multiprocessing.cpu_count()
        else:
            self.num_cpus = int(self.config.get('num_cpus'))
Example #14
    def __init__(self, config, model_details=None):

        self.COUNTER = 0
        self.has_sampled = False
        self.config = config
        verbosity = self.config.get('verbosity')
        if isinstance(verbosity, dict) and 'bayesian_network' in verbosity:
            verbosity = verbosity['bayesian_network']
        Logger.__init__(self, 'BayesianNetwork', verbosity=verbosity)
        self.kernel_contribution = lambda x: (np.sum(x), 1.)
        self.cat_reshaper = CategoryReshaper(self.config)

        # get bnn model details
        if model_details is None:
            from .model_details import model_details
        self.model_details = model_details

        # set up bnn
        if self.config.get('backend') == 'tensorflow':
            from .tfprob_interface import TfprobNetwork
            self.network_executable = '{}/bayesian_network/tfprob_interface/tfprob_interface.py'.format(
                self.config.get('home'))
        else:
            raise GryffinUnknownSettingsError(
                'did not understand backend: "%s".\n\tChoose from "tensorflow"'
                % self.config.get('backend'))

        # get domain volume
        self.volume = 1.
        feature_lengths = self.config.feature_lengths
        feature_ranges = self.config.feature_ranges
        for feature_index, feature_type in enumerate(
                self.config.feature_types):
            if feature_type == 'continuous':
                self.volume *= feature_ranges[feature_index]
            elif feature_type == 'categorical':
                self.volume *= feature_lengths[feature_index]
            elif feature_type == 'discrete':
                self.volume *= feature_ranges[feature_index]
            else:
                raise GryffinUnknownSettingsError(
                    'did not understand parameter type: "%s" of variable "%s".\n\t(%s) Please choose from "continuous", "discrete" or "categorical"'
                    % (feature_type, self.config.feature_names[feature_index],
                       self.template))
        self.inverse_volume = 1 / self.volume

        # compute sampling parameter values
        if self.config.get('sampling_strategies') == 1:
            self.sampling_param_values = np.zeros(1)
        else:
            self.sampling_param_values = np.linspace(
                -1.0, 1.0, self.config.get('sampling_strategies'))
            self.sampling_param_values = self.sampling_param_values[::-1]
        self.sampling_param_values *= self.inverse_volume
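For instance, with three sampling strategies and a hypothetical domain volume of 4, the computation above gives:

    sampling_param_values = np.linspace(-1.0, 1.0, 3)[::-1]  # array([ 1.,  0., -1.])
    sampling_param_values *= 1. / 4.                         # array([ 0.25,  0.  , -0.25])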
Example #15
    def __init__(self, config):
        self.config = config
        Logger.__init__(self,
                        'CategoryReshaper',
                        verbosity=self.config.get('verbosity'))

        self.kernel_reshaper = KernelReshaper()

        if self.config.get('num_cpus') == 'all':
            self.num_cpus = multiprocessing.cpu_count()
        else:
            self.num_cpus = int(self.config.get('num_cpus'))
Example #16
    def __init__(self, config):

        self.config = config
        Logger.__init__(self, 'Acquisition', self.config.get('verbosity'))
        self.random_sampler = RandomSampler(self.config.general,
                                            self.config.parameters)
        self.total_num_vars = len(self.config.feature_names)
        self.local_optimizers = None
        # figure out how many CPUs to use
        if self.config.get('num_cpus') == 'all':
            self.num_cpus = multiprocessing.cpu_count()
        else:
            self.num_cpus = int(self.config.get('num_cpus'))
Example #17
    def __init__(self, config, all_options=None):
        self.config = config
        # factor modulating the density-based penalty in sample selector
        self.dist_param = self.config.get('dist_param')
        self.all_options = all_options

        self.verbosity = self.config.get('verbosity')
        Logger.__init__(self, 'SampleSelector', verbosity=self.verbosity)
        # figure out how many CPUs to use
        if self.config.get('num_cpus') == 'all':
            self.num_cpus = multiprocessing.cpu_count()
        else:
            self.num_cpus = int(self.config.get('num_cpus'))
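This num_cpus block recurs in almost every constructor in these examples; a small helper (hypothetical, not part of the source), sketched under the assumption that the setting is either 'all' or an integer, would remove the duplication:

    def resolve_num_cpus(config):
        # map the 'num_cpus' setting to a concrete CPU count
        if config.get('num_cpus') == 'all':
            return multiprocessing.cpu_count()
        return int(config.get('num_cpus'))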
Example #18
    def __init__(self, config):
        self.config = config
        self.chimera = Chimera(self.config.obj_tolerances,
                               self.config.get('softness'))
        Logger.__init__(self,
                        'ObservationProcessor',
                        verbosity=self.config.get('verbosity'))

        # compute some boundaries
        self.feature_lowers = self.config.feature_lowers
        self.feature_uppers = self.config.feature_uppers
        self.soft_lower = self.feature_lowers + 0.1 * (self.feature_uppers -
                                                       self.feature_lowers)
        self.soft_upper = self.feature_uppers - 0.1 * (self.feature_uppers -
                                                       self.feature_lowers)
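The soft bounds shrink the feasible range by 10% on each side; with hypothetical bounds of 0 and 10:

    feature_lowers, feature_uppers = np.array([0.]), np.array([10.])
    soft_lower = feature_lowers + 0.1 * (feature_uppers - feature_lowers)  # array([1.])
    soft_upper = feature_uppers - 0.1 * (feature_uppers - feature_lowers)  # array([9.])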
Example #19
    def __init__(self, config):

        self.config = config
        self.is_generating = False

        self.obs_params = None
        self.obs_objs = None
        self.gen_feature_descriptors = None

        verbosity = self.config.get('verbosity')
        Logger.__init__(self, 'DescriptorGenerator', verbosity=verbosity)

        if self.config.get('num_cpus') == 'all':
            self.num_cpus = multiprocessing.cpu_count()
        else:
            self.num_cpus = int(self.config.get('num_cpus'))
Example #20
	def __init__(self, config_general, config_params):
		self.config_general = config_general
		self.config_params  = config_params
		verbosity           = self.config_general.verbosity
		if isinstance(verbosity, dict) and 'random_sampler' in verbosity:
			verbosity = verbosity['random_sampler']
		Logger.__init__(self, 'RandomSampler', verbosity)

		if self.config_general.sampler == 'sobol':
			from .sobol   import SobolContinuous
			from .uniform import UniformCategorical, UniformDiscrete
			self.continuous_sampler  = SobolContinuous()
			self.categorical_sampler = UniformCategorical()
			self.discrete_sampler    = UniformDiscrete()
		elif self.config_general.sampler == 'uniform':
			from .uniform import UniformCategorical, UniformContinuous, UniformDiscrete
			self.continuous_sampler  = UniformContinuous()
			self.categorical_sampler = UniformCategorical()
			self.discrete_sampler    = UniformDiscrete()
		else:
			raise GryffinUnknownSettingsError('did not understand sampler setting: "%s".\n\tChoose from "uniform" or "sobol"' % self.config_general.sampler)
Example #21
    def __init__(self, config, constraints=None):
        """
        constraints : list or None
            List of callables used as constraint functions. Each takes a parameter dict, e.g.
            {'x0': 0.1, 'x1': 10, 'x2': 'A'}, and returns a bool indicating
            whether the point is in the feasible region.
        """

        # register attributes
        self.config = config
        self.reject_tol = self.config.get('reject_tol')

        # if constraints is not None and not already a list, wrap it in a list
        if constraints is not None and not isinstance(constraints, list):
            self.constraints = [constraints]
        else:
            self.constraints = constraints

        # set verbosity
        verbosity = self.config.get('verbosity')
        Logger.__init__(self, 'RandomSampler', verbosity)
Example #22
    def __init__(self, config):
        self.config = config
        self.chimera = Chimera(tolerances=self.config.obj_tolerances,
                               absolutes=self.config.obj_absolutes,
                               goals=self.config.obj_goals,
                               softness=self.config.get('softness'))
        Logger.__init__(self,
                        'ObservationProcessor',
                        verbosity=self.config.get('verbosity'))

        # compute some boundaries
        self.feature_lowers = self.config.feature_lowers
        self.feature_uppers = self.config.feature_uppers
        self.soft_lower = self.feature_lowers + 0.1 * (self.feature_uppers -
                                                       self.feature_lowers)
        self.soft_upper = self.feature_uppers - 0.1 * (self.feature_uppers -
                                                       self.feature_lowers)

        # attributes of the data
        self.min_obj = None
        self.max_obj = None
Example #23
    def __init__(self, config, db_attributes, verbosity=0):
        Logger.__init__(self, 'DB_Werkzeug', verbosity=verbosity)
        self.config = config
        self.db_attrs = db_attributes
Example #24
	def __init__(self, config):
		self.config = config
		Logger.__init__(self, 'SampleSelector', verbosity=self.config.get('verbosity'))
		self.num_cpus = multiprocessing.cpu_count()