Code Example #1
# (fragment: the earlier branches that choose params_used are omitted here)
elif args.params in ['few_betas', 'small_beta']:
    params_used = beta_params

if args.dmax is not None:
    params_used["layers.5.layer_kwargs.d_max"] = list(args.dmax)
    vary_together = False

# params = {"activation.encoder": ["softplus", "sigmoid"]}
if args.dataset == 'fmnist':
    d = dataset.fMNIST()
elif args.dataset == 'binary_mnist':
    d = dataset.MNIST(binary=True)
elif args.dataset == 'mnist':
    d = dataset.MNIST()
elif args.dataset == 'omniglot':
    d = dataset.Omniglot()
elif args.dataset == 'dsprites':
    d = dataset.DSprites()
else:
    raise ValueError("unknown dataset: {}".format(args.dataset))

if args.per_label is not None:
    d.shrink_supervised(int(args.per_label))


# The session name determines where results are saved.
session_kwargs = dict(name=args.name, config=args.config, dataset=d,
                      parameters=params_used, verbose=args.verbose,
                      per_label=args.per_label, vary_together=vary_together)
if args.time is not None:
    session_kwargs["time"] = args.time
a = session.Session(**session_kwargs)
# Debug: uncomment to inspect the generated configs.
# for cfg in a.configs:
#     print(cfg)
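For context, here is a minimal sketch of the argument parser this fragment assumes. The flag names and types are inferred from the args attributes used above and may differ from the real script:

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--name', required=True)        # session name / save location
parser.add_argument('--config', default=None)       # path to a config file
parser.add_argument('--dataset', default='mnist')   # fmnist, binary_mnist, mnist, omniglot, dsprites
parser.add_argument('--params', default=None)       # e.g. 'few_betas' or 'small_beta'
parser.add_argument('--dmax', type=int, nargs='+', default=None)
parser.add_argument('--per_label', type=int, default=None)
parser.add_argument('--time', type=float, default=None)
parser.add_argument('--verbose', action='store_true')
args = parser.parse_args()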
Code Example #2
    def _parse_args(self):
        # Note: a dataset object can also be passed in directly.
        if isinstance(self.dataset, str):
            if self.dataset == 'mnist':
                self.dataset = dataset.MNIST(binary=False)
            elif self.dataset == 'binary_mnist':
                self.dataset = dataset.MNIST(binary=True)
            elif self.dataset == 'omniglot':
                self.dataset = dataset.Omniglot()
            elif self.dataset == 'celeb_a':
                pass  # 'celeb_a' is presumably resolved elsewhere; keep the string
            elif self.dataset == 'dsprites':
                self.dataset = dataset.DSprites()

        # selects per_label examples from each class for reduced training data
        if self.per_label is not None:
            self.dataset.shrink_supervised(self.per_label)
        self.dataset_clean = copy(self.dataset)

        # Pop "norm" so it is never forwarded to the Keras optimizer kwargs
        opt_norm = self.optimizer_params.pop("norm", None)

        # Resolve the optimizer class by name, e.g. "Adam" -> keras.optimizers.Adam
        self.optimizer = getattr(keras.optimizers,
                                 self.optimizer)(**self.optimizer_params)

        #if opt_norm is not None:
        #    self.optimizer = NormalizedOptimizer(self.optimizer, normalization = opt_norm)

        self.lr_callback = False
        if isinstance(self.lr, str):
            # self.lr names a schedule function (epoch -> learning rate);
            # look it up in lr_sched, then in custom_functions.lr_sched.
            try:
                mod = importlib.import_module('lr_sched')
                self.lr = getattr(mod, self.lr)
                self.lr_callback = True
            except (ImportError, AttributeError):
                try:
                    mod = importlib.import_module('custom_functions.lr_sched')
                    self.lr = getattr(mod, self.lr)
                    self.lr_callback = True
                except (ImportError, AttributeError):
                    warnings.warn(
                        "Cannot find LR schedule function. Proceeding with a "
                        "default, constant learning rate of 0.001.")
                    self.lr = 0.001

        # Architecture args
        if self.encoder_dims is None:
            try:
                self.encoder_dims = self.latent_dims
            except AttributeError as e:
                raise ValueError(
                    "encoder_dims not given and latent_dims is unset") from e

        if self.decoder_dims is None:
            # Mirror the encoder dims (excluding the last) and end at the
            # flattened data dimension.
            self.decoder_dims = list(reversed(self.encoder_dims[:-1]))
            self.decoder_dims.append(self.dataset.dim)
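To make the schedule lookup above concrete, here is a hypothetical lr_sched module entry; the function name and decay constants are illustrative, not taken from the real codebase:

# lr_sched.py (hypothetical): schedule functions map an epoch index to a rate.
def step_decay(epoch):
    # Halve a 1e-3 base rate every 10 epochs.
    return 1e-3 * (0.5 ** (epoch // 10))

With self.lr = 'step_decay', _parse_args resolves the string to this function and sets lr_callback = True, so the training loop can presumably wrap it in keras.callbacks.LearningRateScheduler(self.lr) before calling fit().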