def generate(self):
        x = np.copy(self.__input_img).flatten()

        for i in range(self.__args.epochs):
            self.__logger.info('Epoch: {} started'.format(i))
            start_time = time.time()
            x, min_val, info = optimizer(self.__loss_evaluator.loss, x,
                                         fprime=self.__loss_evaluator.gradients,
                                         maxfun=self.__args.max_fun,
                                         disp=self.__args.verbose)
            self.__logger.info('Loss for epoch {} has value: {}'.format(i, min_val))

            # Save current generated image
            img = x.copy().reshape((self.__input_img.shape[1], self.__input_img.shape[2], 3))
            img = np.clip(img, 0, 255).astype(np.uint8)

            input_filename = self.__get_file_name(self.__input_file)
            style_filename = self.__get_file_name(self.__style_file)
            output_filename = "{}/{}_{}_iteration_{}.png".format(self.__args.output_dir,
                                                                 input_filename,
                                                                 style_filename,
                                                                 i)

            scipy.misc.imsave(output_filename, img)
            self.__logger.info("For epoch {} current image is saved in: ".format(i, output_filename))
            end_time = time.time()
            self.__logger.info('Iteration {} completed in {}secs'.format(i, round(end_time - start_time, 4)))
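The call signature above (a flattened starting vector, fprime, maxfun, disp, and an (x, min_val, info) return tuple) matches scipy.optimize.fmin_l_bfgs_b. A minimal sketch of the presumed binding, with a hypothetical quadratic loss standing in for self.__loss_evaluator:

# Sketch only: assumes `optimizer` is scipy.optimize.fmin_l_bfgs_b; the loss
# and gradient below are hypothetical stand-ins for the real loss evaluator.
import numpy as np
from scipy.optimize import fmin_l_bfgs_b as optimizer

def loss(x):
    # simple quadratic bowl with minimum at x == 1
    return float(np.sum((x - 1.0) ** 2))

def gradients(x):
    # analytic gradient of the quadratic above
    return 2.0 * (x - 1.0)

x0 = np.zeros(10)
x, min_val, info = optimizer(loss, x0, fprime=gradients, maxfun=20, disp=0)
print(min_val, info['warnflag'])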
Example #2
def main():
    #Initialize parameters
    #Rs   = [0., 0.5, 1.0,  1.5,  0.0, 0., 0.5]
    Rs = [2.5, 2.8, 3.0, 3.2, 3.4, 3.6, 3.8]
    etas = [5, 10., 20., 40., 100., 200., 200.0]
    parametervector = []
    for i in range(len(Rs)):
        parametervector.append(etas[i])
        #parametervector.append(Rs[i])

    #x0 = np.array(parametervector)
    x0 = parametervector

    optimizer = 'basinhopping'

    #from scipy.optimize import minimize as optimizer
    #optimizer_kwargs = {
    #                    'method' : 'BFGS',
    #                    'options': {'gtol': 1e-15, }
    #                   }
    if optimizer == 'BFGS':
        from scipy.optimize import minimize as optimizer
        optimizer_kwargs = {
            'method': 'BFGS',
            'options': {
                'gtol': 1e-15,
            }
        }
        #optimizer_kwargs = {'method':'BFGS', 'gtol': 1e-15, }
    elif optimizer == 'basinhopping':
        from scipy.optimize import basinhopping as optimizer
        optimizer(get_loss, x0, niter=500, T=0.1, stepsize=0.01)
    elif optimizer == 'L-BFGS-B':
        from scipy.optimize import minimize as optimizer
        optimizer_kwargs = {
            'method': 'L-BFGS-B',
            'options': {
                'ftol': 1e-05,
                'gtol': 1e-08,
                'maxfun': 1000000,
                'maxiter': 1000000
            }
        }
        import scipy
        from distutils.version import StrictVersion
        if StrictVersion(scipy.__version__) >= StrictVersion('0.17.0'):
            optimizer_kwargs['options']['maxls'] = 2000
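As shown, only the basinhopping branch actually runs an optimization; the BFGS and L-BFGS-B branches only assemble optimizer_kwargs, which are presumably consumed by a final call like the one at the end of the later example. A minimal sketch under that assumption, with a hypothetical get_loss stub:

# Sketch only: assumes `optimizer` is scipy.optimize.minimize here and that
# `get_loss` (hypothetical stub) returns a scalar loss for a parameter vector.
import numpy as np
from scipy.optimize import minimize as optimizer

def get_loss(x):
    return float(np.sum(np.asarray(x) ** 2))

x0 = [1.0, 2.0, 3.0]
optimizer_kwargs = {'method': 'L-BFGS-B',
                    'options': {'ftol': 1e-05, 'gtol': 1e-08}}
result = optimizer(get_loss, x0, jac=False, **optimizer_kwargs)
print(result.x, result.fun)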
Example #3
def train(self, instances):
    self.collect_x_y(instances, self.min_f, self.max_f)
    initial_lambda = np.zeros(self.get_total_number())
    returns = optimizer(self.calculate_log_likelihood, x0=initial_lambda,
                        fprime=self.calculate_gradient, iprint=1, maxiter=100)
    self.final_lambdas = returns[0]
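Here too the keyword names (x0, fprime, iprint, maxiter) and the tuple return indexed with [0] match scipy.optimize.fmin_l_bfgs_b. A minimal check of that presumed binding (an assumption, not taken from the source):

# Assumed binding; a toy quadratic objective replaces the log-likelihood.
import numpy as np
from scipy.optimize import fmin_l_bfgs_b as optimizer

returns = optimizer(lambda lam: float(np.sum(lam ** 2)),
                    x0=np.ones(3),
                    fprime=lambda lam: 2.0 * lam,
                    iprint=-1, maxiter=100)
final_lambdas = returns[0]  # optimized parameters; returns[1] is the final objective value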
Example #4
if pvalue < alpha:
    print('Normality test failed, with {:.2e} < {}.'.format(pvalue, alpha))
    print('Data is not Gaussian.')
print()

initial = [5300, 1.3, 0, 1000, 1, -4]
plt.hist(Data, bins=150, color='c', zorder=1, density=True)
plt.plot(bin_centers,
         combinedGaussian(bin_centers, *initial),
         'r--',
         zorder=10)
plt.title('Histogram with Initial Fit for Data')
plt.xlabel('Value')
plt.ylabel('Frequency')
plt.show()

plt.hist(Data, bins=150, color='c', zorder=1, density=True)
params, cov = optimizer(combinedGaussian, bin_centers, n, p0=initial)
plt.plot(bin_centers, combinedGaussian(bin_centers, *params), 'r--', zorder=10)
plt.title('Fitted Combined Gaussian for Given Data Values')
plt.xlabel('Value')
plt.ylabel('Frequency')
plt.show()

param_names = ['C1    ', 'sigma1', 'mean1 ', 'C2    ', 'sigma2', 'mean2 ']
print("Fit parameters:")
for i in range(len(params)):
    if (params[i] < 0):
        print('{} = {:.3e}'.format(param_names[i], params[i]))
    else:
        print('{} =  {:.3e}'.format(param_names[i], params[i]))
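The six parameter names suggest combinedGaussian is a sum of two Gaussians, and the (params, cov) return together with the p0 keyword matches scipy.optimize.curve_fit. A hedged sketch of both assumptions:

# Sketch under two assumptions: `optimizer` is scipy.optimize.curve_fit and
# `combinedGaussian` is a sum of two Gaussians with the six parameters
# (C1, sigma1, mean1, C2, sigma2, mean2) listed in param_names above.
import numpy as np
from scipy.optimize import curve_fit as optimizer

def combinedGaussian(x, C1, sigma1, mean1, C2, sigma2, mean2):
    g1 = C1 * np.exp(-(x - mean1) ** 2 / (2.0 * sigma1 ** 2))
    g2 = C2 * np.exp(-(x - mean2) ** 2 / (2.0 * sigma2 ** 2))
    return g1 + g2

# n and bin_centers would come from the histogram, e.g.:
# n, bin_edges = np.histogram(Data, bins=150, density=True)
# bin_centers = 0.5 * (bin_edges[:-1] + bin_edges[1:])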
Example #5
def main():
  #Initialize parameters
  #Rs   = [0., 0.5, 1.0,  1.5,  0.0, 0., 0.5]
  global eta
  global Rs
  #etas   = [eta]
  #Rss = [Rs]
  etas = [100.]
  Rss = [0.]
  #g2_ref = calculate_fp([eta, Rs])
  
  parametervector = []
  for i in range(len(Rss)):
     parametervector.append(etas[i])
     parametervector.append(Rss[i])
 
  #x0 = np.array(parametervector)
  x0 = parametervector
 
 
  optimizer = 'basinhopping'
  #optimizer = 'L-BFGS-B'
 
  #from scipy.optimize import minimize as optimizer
  #optimizer_kwargs = {
  #                    'method' : 'BFGS',
  #                    'options': {'gtol': 1e-15, }
  #                   }
  if optimizer == 'BFGS':
     from scipy.optimize import minimize as optimizer
     optimizer_kwargs = {
                         'method' : 'BFGS',
                         'options': {'gtol': 1e-15, }
                        }
     #optimizer_kwargs = {'method':'BFGS', 'gtol': 1e-15, }
  elif optimizer == 'basinhopping':
     from scipy.optimize import basinhopping as optimizer
     minimizer_kwargs = {"method": "L-BFGS-B",
                         'options': {
                                     'maxiter': 10,
                                     #'maxfun':1
                                     }
                        }
     #mybounds = MyBounds()
     results = optimizer(get_loss, x0,
                         #minimizer_kwargs=minimizer_kwargs,
                         niter=500,
                         T=0.01,
                         stepsize=1.0,
                         #niter_success=1000,
                         disp=True,
                         callback=print_fun,
                         #accept_test=mybounds
                         )
     print(results)
     sys.exit()
  elif optimizer == 'L-BFGS-B':
     from scipy.optimize import minimize as optimizer
     optimizer_kwargs = {
                         'method': 'L-BFGS-B',
                         'options': {'ftol': 1e-05,
                                     'gtol': 1e-18,
                                     'maxfun': 1000000,
                                     'maxiter':1500000,
                                     'disp': True}
                        }
     import scipy
     from distutils.version import StrictVersion
     if StrictVersion(scipy.__version__) >= StrictVersion('0.17.0'):
         optimizer_kwargs['options']['maxls'] = 2000
  optimizer(get_loss, x0, jac=False, **optimizer_kwargs)
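get_loss, print_fun, calculate_fp and MyBounds are not included in this snippet. scipy.optimize.basinhopping invokes its callback as callback(x, f, accept), so hypothetical stubs consistent with the call above could look like:

# Hypothetical stubs for names used above but not defined in this snippet.
def get_loss(parametervector):
    # The real loss presumably compares fingerprints built from (eta, Rs)
    # against a reference; a dummy scalar keeps the sketch self-contained.
    return 0.0

def print_fun(x, f, accept):
    # basinhopping calls the callback with the candidate minimum, its value,
    # and whether it was accepted.
    print("at minimum %.4f accepted %d" % (f, int(accept)))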
Example #6
def chi_square(fit_parameters, x, y, sigma):
    return np.sum((y - Function(x, *fit_parameters))**2 / sigma**2)
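Function itself does not appear in this snippet; the three-element guess below only fixes its arity. A placeholder, purely for illustration, so chi_square above has something to call:

# Purely hypothetical placeholder: the real model is not shown in this snippet;
# only its three-parameter signature can be inferred from guess = [1, -1, 1].
import numpy as np

def Function(x, A, B, C):
    return A * np.exp(B * x) + C  # illustrative form only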


filename = "C:/Users/ryank/Desktop/Work/Classes/Python/ASTR205/Data/"
filename += "ASTR 205 1-1.csv"
Data = np.loadtxt(filename, comments='#')
sigma = 1.9

plt.scatter(Data[:, 0], Data[:, 1], c='c', s=1)
plt.title('Initial Plot of the Given Data')
plt.xlabel('X values')
plt.ylabel('Y values')
plt.show()

guess = [1, -1, 1]
fit_params, fit_cov = optimizer(Function, Data[:, 0], Data[:, 1], p0=guess)

chi2 = chi_square(fit_params, Data[:, 0], Data[:, 1], sigma)
dof = len(Data[:, 0]) - len(fit_params)
print("\nGoodness of fit - chi square measure:")
print("Chi2/dof = {}\n".format(chi2 / dof))

fit_cov = fit_cov * dof / chi2
fit_params_error = np.sqrt(np.diag(fit_cov))
param_names = ['A', 'B', 'C']
print("Fit parameters:")
for i in range(len(fit_params)):
    if (fit_params[i] < 0):
        print('{} = {:.3e} +/- {:.3e}'.format(param_names[i], fit_params[i],
                                              fit_params_error[i]))
    else: