def __init__(self, input_shape, output_shape, con_len=3, con_layers=None,
             last_pooling=keras.layers.AvgPool1D, dense_layers=None,
             pooling_len=3, problem_name='noname', use_logger=False):
    """
    Initialize the CNN regression model.

    Parameters
    ----------
    input_shape : tuple
        Shape of the input fed to the first convolutional layer.
    output_shape : int or tuple
        Shape of the regression output.
    con_len : int, optional
        Convolution kernel length, by default 3
    con_layers : list of int, optional
        Number of filters in each convolutional layer, by default [25, 50]
    last_pooling : keras pooling layer class, optional
        Pooling layer applied after the last convolution, by default
        keras.layers.AvgPool1D
    dense_layers : list of int, optional
        Width of each dense layer, by default [100, 100]
    pooling_len : int, optional
        Pooling window length, by default 3
    problem_name : str, optional
        Problem/dataset name embedded in the model name, by default 'noname'
    use_logger : bool, optional
        Enable/disable logging, by default False
    """
    # Fix: avoid mutable (list) default arguments — a list default is
    # evaluated once and shared across every instantiation.
    if con_layers is None:
        con_layers = [25, 50]
    if dense_layers is None:
        dense_layers = [100, 100]
    self.name = 'CNNModel_con_len' + str(con_len) + '_con_layers' + str(
        con_layers) + '_pl' + str(pooling_len) + '_dense_layers' + str(
        dense_layers) + '_data' + problem_name
    super(CNNModel, self).__init__(self.name, problem_name, use_logger)
    if self.use_logger:
        self.logger = ml.SciopeLogger().get_logger()
        self.logger.info(
            "Convolutional Neural Network regression model initialized")
    self.model = self._construct_model(input_shape, output_shape,
                                       con_len=con_len,
                                       con_layers=con_layers,
                                       pooling_len=pooling_len,
                                       last_pooling=last_pooling,
                                       dense_layers=dense_layers)
def wrap(*args, **kwargs):
    """
    Invoke the wrapped function, logging its wall-clock run time when
    profiling is enabled.

    Returns
    -------
    The wrapped function's return value (whether or not profiling is on).
    """
    if use_profiler:
        logger = sl.SciopeLogger().get_logger()
        start_time = time.time()
        result = function_handle(*args, **kwargs)
        logger.info("Function {0} run time = {1} seconds".format(
            function_handle, time.time() - start_time))
        return result
    # Fix: previously the wrapped function was only invoked inside the
    # profiling branch — with use_profiler=False the wrapper returned None
    # without ever calling function_handle.
    return function_handle(*args, **kwargs)
def __init__(self, data, sim, prior_function, perturbation_kernel=None,
             summaries_function=bs.Burstiness().compute,
             distance_function=euc.EuclideanDistance(),
             summaries_divisor=None, use_logger=False):
    """
    SMC-ABC constructor: store the supplied inference components and fall
    back to an adaptive multivariate-normal perturbation kernel when the
    caller provides none.
    """
    self.name = 'SMC-ABC'
    super(SMCABC, self).__init__(self.name, data, sim, use_logger)
    # Components supplied by the caller.
    self.prior_function = prior_function
    self.summaries_function = summaries_function
    self.distance_function = distance_function
    self.summaries_divisor = summaries_divisor
    # Kernel default: adaptive multivariate normal matching the prior's
    # dimensionality.
    self.perturbation_kernel = (
        perturbation_kernel
        if perturbation_kernel is not None
        else MultivariateNormalKernel(
            d=self.prior_function.get_dimension(), adapt=True))
    if self.use_logger:
        self.logger = ml.SciopeLogger().get_logger()
        self.logger.info(
            "Sequential Monte-Carlo Approximate Bayesian Computation initialized"
        )
def __init__(self, xmin, xmax, use_logger=False, seed_size=None):
    """
    LatinHypercube constructor

    Parameters
    ----------
    xmin : vector or 1D array
        Specifies the lower bound of the hypercube within which the design is generated
    xmax : vector or 1D array
        Specifies the upper bound of the hypercube within which the design is generated
    use_logger : bool, optional
        controls whether logging is enabled or disabled, by default False
    seed_size : int, optional
        number of points in the LHS seed design, by default the
        dimensionality of the space

    Raises
    ------
    ValueError
        if the effective seed size is not in the range [1, len(xmin)]
    """
    name = 'LatinHypercube'
    super(LatinHypercube, self).__init__(name, xmin, xmax, use_logger)
    self._seed_size = len(xmin) if seed_size is None else seed_size
    self._nv = len(xmin)  # dimensionality / # variables
    # Fix: validate with an explicit exception instead of `assert`, which
    # is silently stripped when Python runs with the -O flag.
    if not 1.0 <= self._seed_size <= len(xmin):
        raise ValueError(
            "seed_size must be in the range [1, {0}], got {1}".format(
                len(xmin), self._seed_size))
    if self.use_logger:
        self.logger = ml.SciopeLogger().get_logger()
        self.logger.info(
            "Latin hypercube design in {0} dimensions initialized".format(
                len(self.xmin)))
def __init__(self, data, sim, prior_function, num_monte_carlo=20,
             verbose=False, use_logger=False):
    """
    Initialize the BNN regression-based posterior estimator.

    Parameters
    ----------
    data : nd-array
        the observed / fixed dataset
    sim : callable
        the simulator function
    prior_function : sciope.utilities.priors object
        the prior function generating candidate samples
    num_monte_carlo : int, optional
        number of Monte Carlo forward passes, by default 20
    verbose : bool, optional
        enable/disable verbose training output, by default False
    use_logger : bool, optional
        enable/disable logging, by default False
    """
    self.name = 'BNN Regressor'
    #super(BNN, self).__init__(self.name, data, sim, use_logger)
    self.prior_function = prior_function
    self.num_monte_carlo = num_monte_carlo
    self.use_logger = use_logger  #TODO: use super at production ready
    self.sim = sim  #TODO: use super at production ready
    self.data = data  #TODO: use super at production ready
    self.verbose = verbose
    # Default training hyperparameters for the underlying network.
    self._train_hyperparams = {
        'batch_size': 256,
        'epochs': 400,
        'verbose': False
    }
    # NOTE(review): attribute name looks like a typo of "_bnn_compiled";
    # left unchanged because other methods may reference it — verify.
    self._bnn_complied = False
    if self.use_logger:
        self.logger = ml.SciopeLogger().get_logger()
        self.logger.info(
            # Fix: corrected typo "esitmator" -> "estimator" in log message.
            "Sequential Bayesian neural network posterior estimator initialized"
        )
def __init__(self, use_logger=False):
    """
    Euclidean distance function constructor.

    Sets the distance-function name and delegates the remaining setup to
    the base class.
    """
    self.name = 'Euclidean'
    super(EuclideanDistance, self).__init__(self.name, use_logger)
    if not self.use_logger:
        return
    self.logger = ml.SciopeLogger().get_logger()
    self.logger.info("EuclideanDistance distance function initialized")
def __init__(self, kernel='rbf', alpha=0.7, gamma=0.1, learning_rate=1.0,
             use_logger=False):
    """
    Label propagation model constructor.

    Parameters
    ----------
    kernel : str, optional
        kernel type, by default 'rbf'
    alpha : float, optional
        by default 0.7
    gamma : float, optional
        by default 0.1
    learning_rate : float, optional
        by default 1.0
    use_logger : bool, optional
        enable/disable logging, by default False
    """
    self.name = 'LPModel'
    # Record the propagation hyperparameters before base-class setup.
    self.learning_rate = learning_rate
    self.gamma = gamma
    self.alpha = alpha
    self.kernel = kernel
    super(LPModel, self).__init__(self.name, use_logger)
    if self.use_logger:
        self.logger = ml.SciopeLogger().get_logger()
        self.logger.info("Label propagation model initialized")
def __init__(self, normalization=None, mean_trajectories=False,
             use_logger=False):
    """
    Identity summary statistic constructor.

    Parameters
    ----------
    normalization : optional
        normalization applied by the statistic, by default None
    mean_trajectories : bool, optional
        whether statistics are averaged over trajectories, by default False
    use_logger : bool, optional
        enable/disable logging, by default False
    """
    self.name = 'Identity'
    self.normalization = normalization
    # Fix: mean_trajectories was previously dropped — super was called as
    # (name, use_logger), so use_logger landed in the mean_trajectories
    # slot (compare Burstiness, which passes all three arguments).
    super(Identity, self).__init__(self.name, mean_trajectories, use_logger)
    if self.use_logger:
        self.logger = ml.SciopeLogger().get_logger()
        self.logger.info("Identity summary statistic initialized")
def __init__(self, use_logger=False):
    """
    Artificial Neural Network regression model constructor.

    Parameters
    ----------
    use_logger : bool, optional
        Controls whether logging is enabled or disabled, by default False
    """
    self.name = 'ANNModel'
    super(ANNModel, self).__init__(self.name, use_logger)
    if not self.use_logger:
        return
    self.logger = ml.SciopeLogger().get_logger()
    self.logger.info("Artificial Neural Network regression model initialized")
def __init__(self, input_shape, output_shape, layers=None, use_logger=False,
             problem_name="None"):
    """
    Deep Neural Network regression model constructor.

    Parameters
    ----------
    input_shape : tuple
        shape of the input fed to the first dense layer
    output_shape : int or tuple
        shape of the regression output
    layers : list of int, optional
        width of each hidden dense layer, by default [100, 100, 100]
    use_logger : bool, optional
        enable/disable logging, by default False
    problem_name : str, optional
        name of the problem/dataset, by default "None"
    """
    # Fix: avoid a mutable (list) default argument — the default list is
    # evaluated once and shared across all instantiations.
    if layers is None:
        layers = [100, 100, 100]
    self.name = 'DNNModel_l' + str(layers)
    super(DNNModel, self).__init__(self.name, problem_name, use_logger)
    if self.use_logger:
        self.logger = ml.SciopeLogger().get_logger()
        self.logger.info(
            "Deep Neural Network regression model initialized")
    self.model = self._construct_model(input_shape, output_shape,
                                       layers=layers)
def __init__(self, space_min, space_max, use_logger=False):
    """
    Uniform prior over the hyper-box bounded by space_min and space_max.

    Parameters
    ----------
    space_min : vector or 1D array
        the lowerbound of each variable/dimension
    space_max : vector or 1D array
        the upperbound of each variable/dimension
    use_logger : bool, optional
        whether logging is enabled or disabled, by default False
    """
    self.name = 'Uniform'
    self.lb, self.ub = space_min, space_max
    super(UniformPrior, self).__init__(self.name, use_logger)
    if not self.use_logger:
        return
    self.logger = ml.SciopeLogger().get_logger()
    self.logger.info(
        "Uniform prior in {} dimensions initialized".format(len(self.lb)))
def __init__(self, use_logger=False):
    """
    Support Vector Regression model constructor.

    Parameters
    ----------
    use_logger : bool, optional
        Controls whether logging is enabled or disabled, by default False
    """
    self.name = 'SVRModel'
    super(SVRModel, self).__init__(self.name, use_logger)
    if not self.use_logger:
        return
    self.logger = ml.SciopeLogger().get_logger()
    self.logger.info("Support Vector Regression model initialized")
def __init__(self, data, sim, prior_function, epsilon=0.1,
             summaries_function=bs.Burstiness(),
             distance_function=euc.EuclideanDistance(),
             summaries_divisor=None, use_logger=False):
    """
    Rejection-sampling ABC constructor.

    Parameters
    ----------
    data : nd-array
        the observed / fixed dataset
    sim : nd-array
        the simulated dataset or simulator function
    prior_function : sciope.utilities.priors object
        the prior function generating candidate samples
    epsilon : float, optional
        tolerance bound, by default 0.1
    summaries_function : sciope.utilities.summarystats object, optional
        function calculating summary stats over simulated results;
        by default bs.Burstiness()
    distance_function : sciope.utilities.distancefunctions object, optional
        distance function operating over summary statistics - calculates
        deviation between observed and simulated data;
        by default euc.EuclideanDistance()
    summaries_divisor : 1D numpy array, optional
        instead of normalizing using division by current known max of each
        statistic, use the supplied division factors. These may come from
        prior knowledge, or pre-studies, etc.
    use_logger : bool
        enable/disable logging
    """
    self.name = 'ABC'
    self.use_logger = use_logger
    self.epsilon = epsilon
    self.summaries_divisor = summaries_divisor
    self.summaries_function = summaries_function
    # Store the bound methods so sampling can call them as plain callables.
    self.prior_function = prior_function.draw
    self.distance_function = distance_function.compute
    self.historical_distances = []  # distances accumulated across rounds
    super(ABC, self).__init__(self.name, data, sim, self.use_logger)
    self.sim = sim
    if self.use_logger:
        self.logger = ml.SciopeLogger().get_logger()
        self.logger.info("Approximate Bayesian Computation initialized")
def __init__(self, data, sim, prior_function, perturbation_kernel=None,
             summaries_function=identity.Identity(),
             distance_function=euc.EuclideanDistance(),
             summaries_divisor=None, use_logger=False):
    """Replenishment SMC-ABC implementation.

    Parameters
    ----------
    data : nd-array
        the observed / fixed dataset
    sim : Callable[[nd-array], nd-array]
        the simulator function
    prior_function : sciope.utilities.priors object
        the prior function generating candidate samples
    perturbation_kernel : sciope.utilities.perturbationkernels object, optional
        kernel to perturb samples
    summaries_function : sciope.utilities.summarystats object, optional
        function calculating summary stats over simulated results
    distance_function : sciope.utilities.distancefunction, optional
        distance function operating over summary statistics
    summaries_divisor : 1D numpy array, optional
        fixed divisors used to normalize summary statistics instead of
        dividing by the current known max of each statistic
    use_logger : bool
        enable/disable logging
    """
    # NOTE(review): 'Replenisment' typos below are runtime strings (the
    # algorithm name and a log message) and are deliberately preserved.
    self.name = 'Replenisment-SMC-ABC'
    super(ReplenishmentSMCABC, self).__init__(self.name, data, sim,
                                              use_logger)
    # The prior is stored as the object, but the distance function is
    # stored as its bound compute method (mirrors how they are used later).
    self.prior_function = prior_function
    self.summaries_function = summaries_function
    self.distance_function = distance_function.compute
    self.summaries_divisor = summaries_divisor
    # Default to an adaptive multivariate normal perturbation kernel whose
    # dimensionality matches the prior.
    if perturbation_kernel is not None:
        self.perturbation_kernel = perturbation_kernel
    else:
        self.perturbation_kernel = MultivariateNormalKernel(
            d=self.prior_function.get_dimension(), adapt=True)
    if self.use_logger:
        self.logger = ml.SciopeLogger().get_logger()
        self.logger.info("Replenisment Sequential Monte-Carlo \
Approximate Bayesian Computation initialized")
def __init__(self, xmin, xmax, use_logger=False):
    """
    Random sampling design constructor.

    Parameters
    ----------
    xmin : vector or 1D array
        Specifies the lower bound of the hypercube within which the design is generated
    xmax : vector or 1D array
        Specifies the upper bound of the hypercube within which the design is generated
    use_logger : bool, optional
        controls whether logging is enabled or disabled, by default False
    """
    super(RandomSampling, self).__init__('RandomSampling', xmin, xmax,
                                         use_logger)
    if not self.use_logger:
        return
    self.logger = ml.SciopeLogger().get_logger()
    self.logger.info("Random design in {0} dimensions initialized".format(
        len(self.xmin)))
def __init__(self, xmin, xmax, use_logger=False):
    """
    Initialize the sampler.

    Parameters
    ----------
    xmin : vector or 1D array
        Specifies the lower bound of the hypercube within which sampling is performed
    xmax : vector or 1D array
        Specifies the upper bound of the hypercube within which sampling is performed
    use_logger : bool, optional
        Controls whether logging is enabled or disabled, by default False
    """
    # Docstring fix: use_logger defaults to False; the previous docstring
    # incorrectly said "by default True".
    name = 'MaximinSampling'
    super(MaximinSampling, self).__init__(name, xmin, xmax, use_logger)
    if self.use_logger:
        self.logger = ml.SciopeLogger().get_logger()
        self.logger.info(
            "Maximin sequential sampler in {0} dimensions initialized".
            format(len(self.xmin)))
def __init__(self, input_shape, output_shape, num_train_examples,
             conv_channel=6, kernel_size=5, pooling_len=10,
             problem_name='noname', use_logger=False):
    """
    Bayesian Neural Network classifier model constructor.

    Parameters
    ----------
    input_shape : tuple
        shape of the network input
    output_shape : int or tuple
        shape of the classifier output
    num_train_examples : int
        number of training examples (forwarded to model construction)
    conv_channel : int, optional
        number of convolution channels, by default 6
    kernel_size : int, optional
        convolution kernel size, by default 5
    pooling_len : int, optional
        pooling window length, by default 10
    problem_name : str, optional
        name of the problem/dataset, by default 'noname'
    use_logger : bool, optional
        enable/disable logging, by default False
    """
    self.name = 'BNNModel'
    #super(BNNModel, self).__init__(self.name, use_logger) #TODO: use at production ready
    self.use_logger = use_logger  #TODO: use super at production ready
    if use_logger:
        self.logger = ml.SciopeLogger().get_logger()
        self.logger.info(
            "Bayesian Neural Network classifier model initialized")
    # Build the underlying network immediately.
    self.model = self._construct_model(input_shape, output_shape,
                                       conv_channel, kernel_size,
                                       num_train_examples, pooling_len)
def __init__(self, mean_trajectories=False, improvement=False,
             use_logger=False):
    """
    Burstiness summary statistic constructor.

    Parameters
    ----------
    mean_trajectories : bool, optional
        whether the statistic is averaged over trajectories, by default False
    improvement : bool, optional
        presumably selects an improved variant of the burstiness
        estimator — confirm against the compute() implementation;
        by default False
    use_logger : bool, optional
        enable/disable logging, by default False
    """
    # Docstring fix: filled in the "[summary]"/"[description]" placeholders
    # and corrected the stated default of mean_trajectories (it is False,
    # not True as previously documented).
    self.name = 'Burstiness'
    self.improvement = improvement
    super(Burstiness, self).__init__(self.name, mean_trajectories,
                                     use_logger)
    if self.use_logger:
        self.logger = ml.SciopeLogger().get_logger()
        self.logger.info("Burstiness summary statistic initialized")
def __init__(self, levels, xmin, xmax, use_logger=False):
    """
    Initialize a factorial design with specified parameters.

    Parameters
    ----------
    levels : integer
        The number of levels of the factorial design. Number of generated
        points will be levels^dimensionality
    xmin : vector or 1D array
        Specifies the lower bound of the hypercube within which the design is generated
    xmax : vector or 1D array
        Specifies the upper bound of the hypercube within which the design is generated
    use_logger : bool, optional
        controls whether logging is enabled or disabled, by default False
    """
    super(FactorialDesign, self).__init__('FactorialDesign', xmin, xmax,
                                          use_logger)
    self.levels = levels
    if not self.use_logger:
        return
    self.logger = ml.SciopeLogger().get_logger()
    self.logger.info(
        "Factorial design in {0} dimensions initialized".format(
            len(self.xmin)))
def __init__(self, data, sim, prior_function, num_bins=10,
             num_monte_carlo=500, verbose=False, use_logger=False):
    """
    Initialize the BNN classification-based posterior estimator.

    Parameters
    ----------
    data : nd-array
        the observed / fixed dataset
    sim : callable
        the simulator function
    prior_function : sciope.utilities.priors object
        the prior whose draw method generates candidate samples
    num_bins : int, optional
        number of bins — presumably used to discretize the parameter
        space for classification; confirm against the training code;
        by default 10
    num_monte_carlo : int, optional
        number of Monte Carlo forward passes, by default 500
    verbose : bool, optional
        enable/disable verbose output, by default False
    use_logger : bool, optional
        enable/disable logging, by default False
    """
    self.name = 'BNN Classifier'
    #super(BNN, self).__init__(self.name, data, sim, use_logger)
    self.prior_function = prior_function.draw
    self.num_bins = num_bins
    self.num_monte_carlo = num_monte_carlo
    self.use_logger = use_logger  #TODO: use super at production ready
    self.sim = sim  #TODO: use super at production ready
    self.data = data  #TODO: use super at production ready
    self.verbose = verbose
    if self.use_logger:
        self.logger = ml.SciopeLogger().get_logger()
        self.logger.info(
            # Fix: corrected typo "esitmator" -> "estimator" in log message.
            "Sequential Bayesian neural network posterior estimator initialized"
        )
# Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ Factorial Initial Design """ # Imports from sciope.designs.initial_design_base import InitialDesignBase from sciope.utilities.housekeeping import sciope_logger as ml import gpflowopt # Set up the logger logger = ml.SciopeLogger().get_logger() # Class definition class FactorialDesign(InitialDesignBase): """ Factorial design implemented through gpflowopt * InitialDesignBase.generate(n) """ def __init__(self, xmin, xmax): name = 'FactorialDesign' super(FactorialDesign, self).__init__(name, xmin, xmax) logger.info("Factorial design in {0} dimensions initialized".format( len(self.xmin)))
def get_logger():
    """
    Return a logger instance obtained from the shared SciopeLogger.
    """
    sciope_logger = ml.SciopeLogger()
    return sciope_logger.get_logger()
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Provides very basic profiling
"""

# Imports
import functools
import time

from sciope.utilities.housekeeping import sciope_logger as sl

# Set up the logger
logger = sl.SciopeLogger().get_logger()


# Very basic function run-time logging
def profile(function_handle):
    """
    Decorator that logs the wall-clock run time of each call to the
    decorated function.

    Parameters
    ----------
    function_handle : callable
        the function to profile

    Returns
    -------
    callable
        a wrapper that invokes function_handle, logs the elapsed time and
        returns the function's result
    """

    # Fix: functools.wraps preserves the wrapped function's __name__,
    # __doc__ and other metadata, which the bare wrapper previously masked.
    @functools.wraps(function_handle)
    def wrap(*args, **kwargs):
        start_time = time.time()
        result = function_handle(*args, **kwargs)
        logger.info("Function {0} run time = {1} seconds".format(
            function_handle, time.time() - start_time))
        return result

    return wrap