示例#1
0
    def __init__(self,
                 data,
                 n_levels=1,
                 min_q=0.0,
                 max_q=0.3,
                 porod_power=4.0,
                 io_guess=None,
                 rg_guess=None):
        """Fit a multi-level unified scattering model to the data with a
        differential-evolution global search.

        Parameters
        ----------
        data : object exposing flex arrays .q, .i, .s (momentum transfer,
            intensity, sigma).
        n_levels : number of structural levels in the model.
        min_q, max_q : q-range actually used for the fit.
        porod_power : fixed Porod exponent.
        io_guess : initial intensity estimate; required (used for scaling).
        rg_guess : radius-of-gyration estimate; required (sets search bounds).
        """
        self.q = data.q
        self.i = data.i
        self.s = data.s

        # Keep a slightly wider q-range for evaluating the final model curve.
        self.q_ori = self.q.deep_copy()
        sel = self.q < max_q + 0.5
        self.q_ori = self.q_ori.select(sel)

        # Restrict the fitted data to the open interval (min_q, max_q).
        sel = self.q < max_q
        self.q = self.q.select(sel)
        self.i = self.i.select(sel)
        self.s = self.s.select(sel)

        sel = self.q > min_q
        self.q = self.q.select(sel)
        self.i = self.i.select(sel)
        self.s = self.s.select(sel)

        self.n_levels = n_levels
        self.max_q = max_q

        # Rescale intensities so the optimizer works with O(10) numbers.
        self.i_scale = io_guess / 10.0
        self.i = self.i / self.i_scale
        self.s = self.s / self.i_scale

        # NOTE(review): the original re-assigned eps to the same 1e-8 inside
        # "if flex.min(self.q) > self.eps:" -- a no-op guard, removed here.
        self.eps = 1e-8

        self.cnst = 1.0 / math.sqrt(6.0)

        self.porod_power = porod_power
        self.rg_guess = rg_guess
        self.io_guess = io_guess

        self.x = None
        # One global parameter plus five parameters per structural level.
        self.n = 1 + 5 * self.n_levels
        # (Removed a dead "self.domain = []" that was immediately overwritten.)
        self.domain = [(0, 50), (5.0, 15.0), (rg_guess * 0.5, rg_guess * 1.5)]
        for ii in xrange(3, self.n):
            self.domain.append((0, 10))
        self.optimizer = de.differential_evolution_optimizer(
            self,
            monitor_cycle=300,
            max_iter=1000000,
            population_size=3 * self.n,
            n_cross=3,
            show_progress_nth_cycle=10,
            show_progress=False,
            f=0.95,
            eps=1e-8)
        self.unifit_curve = self.curve(self.x, self.q_ori)
示例#2
0
    def __init__(self, fixed, moving):
        """Superpose *moving* onto *fixed* by minimising the normalised
        spatial discrepancy over three rotations and three translations."""
        self.fixed = fixed
        self.moving = moving

        self.nsde = nsd_engine(self.fixed)

        # Characteristic distances of both models set the translation bound.
        self.d_fixed = math.sqrt(self.nsde.d_fixed)
        self.d_moving = math.sqrt(self.nsde.get_mean_distance(self.moving))

        self.m_com = self.moving.mean()
        self.f_com = self.fixed.mean()
        self.n_mov = self.moving - self.m_com

        self.d = (self.d_fixed + self.d_moving) / 12

        self.n = 6
        angle = (-math.pi, math.pi)
        shift = (-self.d, self.d)
        # Three Euler angles followed by three translation components.
        self.domain = [angle] * 3 + [shift] * 3
        self.x = None
        self.optimizer = de.differential_evolution_optimizer(
            self,
            population_size=12,
            f=0.85,
            cr=0.95,
            n_cross=2,
            eps=1e-2,
            show_progress=False,
            show_progress_nth_cycle=20)
    def __init__(self, data, min_q, delta=0.15):
        """Two-parameter DE fit over the q-window (min_q, min_q + delta)."""
        self.data = data
        self.min_q = min_q
        self.max_q = self.min_q + delta

        # Clip all three data columns to the fitting window in one pass.
        keep = (data.q > self.min_q) & (data.q < self.max_q)
        self.q = data.q.select(keep)
        self.i = data.i.select(keep)
        self.s = data.s.select(keep)

        self.nn = self.q.size()

        self.n = 2
        self.x = flex.double()
        self.domain = [(0, 1.0e12), (0, 5e3)]

        self.optimizer = de.differential_evolution_optimizer(
            self,
            monitor_cycle=300,
            max_iter=1000000,
            population_size=50,
            n_cross=3,
            show_progress_nth_cycle=1000,
            show_progress=True,
            f=0.75,
            eps=1e-8)
 def de(self):
     self.domain = [(-0.5, 0.5)] * self.n
     self.optimizer = de.differential_evolution_optimizer(
         self,
         population_size=self.n * 2,
         n_cross=1,
         f=0.85,
         eps=1e-5,
         monitor_cycle=50,
         show_progress=False)
     print '#', list(self.x)
 def __init__(self, name, npop=20):
     self.name = name
     self.fcount = 0
     self.n = 2
     self.x = None  # flex.double(self.n, 2)
     self.domain = [(start[0] - 1, start[0] + 1), (start[1] - 1, start[1] + 1)]
     self.optimizer = differential_evolution.differential_evolution_optimizer(
         self, population_size=npop, cr=0.9, n_cross=2, eps=1e-12, show_progress=False
     )
     print "DIFFERENTIAL EVOLUTION ITERATIONS", self.optimizer.count, self.fcount, "SOLUTION", list(
         self.x
     ), self.target(self.x)
 def de(self):
     """First-pass global search: DE over the leading n_fst_pass
     P(r) coefficients."""
     self.n = self.n_fst_pass
     self.pofr = pr_tools.pofr(self.d_max, self.n, self.prior)
     self.domain = [(-0.1, 0.1) for _ in range(self.n)]
     self.optimizer = de.differential_evolution_optimizer(
         self,
         population_size=self.n,
         f=0.85,
         n_cross=1,
         monitor_cycle=50,
         eps=1e-5,
         show_progress=False)
示例#7
0
 def __init__(self, tot):
     """Two-parameter DE search over the box [0, 5] x [0, 5] against the
     target value *tot*."""
     self.to = tot
     self.n = 2
     self.x = []
     self.eps = 1e-16
     self.domain = [(0, 5)] * 2
     self.optim = de.differential_evolution_optimizer(
         self,
         population_size=100,
         f=0.3,
         cr=0.9,
         n_cross=1,
         eps=1e-12,
         show_progress=True,
         show_progress_nth_cycle=1)
示例#8
0
 def __init__(self, tot):
   """Search two parameters in [0, 5] by differential evolution so that
   their combination matches *tot*."""
   self.to = tot
   self.n = 2
   self.x = []
   self.domain = [(0, 5), (0, 5)]
   self.eps = 1e-16
   self.optim = de.differential_evolution_optimizer(
       self,
       population_size=100,
       f=0.3,
       cr=0.9,
       n_cross=1,
       show_progress=True,
       show_progress_nth_cycle=1,
       eps=1e-12)
 def __init__(self, name, npop=20):
     """DE minimisation of the named 2-D test function, searching a unit
     box around the module-level ``start`` point."""
     self.name = name
     self.fcount = 0
     self.n = 2
     self.x = None  # could also seed with flex.double(self.n, 2)
     self.domain = [(s - 1, s + 1) for s in (start[0], start[1])]
     self.optimizer = differential_evolution.differential_evolution_optimizer(
         self,
         population_size=npop,
         cr=0.9,
         n_cross=2,
         eps=1e-12,
         show_progress=False)
     print("DIFFERENTIAL EVOLUTION ITERATIONS", self.optimizer.count,
           self.fcount, "SOLUTION", list(self.x), self.target(self.x))
示例#10
0
  def __init__(self, means, variances, reference_id=0, init_mean=1.0,
               spread=0.1, factor=50, f=0.7, eps=1e-12, out=None,
               show_progress=False, insert_solution_vector=None, add=True):
    """Scale several (mean, variance) data sets onto a reference set using
    a differential-evolution search.

    Parameters
    ----------
    means, variances : per-set arrays of means and variances.
    reference_id : index of the set the others are scaled onto.
    init_mean, spread : centre and half-width of each search interval.
    factor : population-size multiplier (population = n * factor).
    f, eps : DE mutation factor and convergence tolerance.
    out : output stream; defaults to sys.stdout when None.
    show_progress, insert_solution_vector : forwarded to the optimizer.
    add : additive-offset flag consumed by the target function.
    """
    # BUG FIX: the original assigned None unconditionally, silently
    # discarding a caller-supplied ``out`` stream.
    self.out = out
    if self.out is None:
      self.out = sys.stdout

    self.add = add

    self.means = means
    self.vars = variances
    self.ref = reference_id
    self.n_sets = len(self.means)
    self.map = self.setup_coeff_map()

    # Two parameters (scale and offset) for every non-reference set.
    self.n = (len(self.means) - 1) * 2
    self.x = None
    self.domain = [(-spread + init_mean, spread + init_mean)] * self.n
    self.optimizer = de.differential_evolution_optimizer(
        self,
        population_size=self.n * factor,
        show_progress=show_progress,
        eps=eps,
        f=f,
        n_cross=2,
        cr=0.8,
        insert_solution_vector=insert_solution_vector)
示例#11
0
  def __init__(self, q, i, s, up_rg, up_io):
    """Two-parameter (Rg, I0) fit to scattering data via DE, with Rg
    searched in (0, up_rg) and I0 in (0, up_io)."""
    self.q = q
    self.i = i
    self.s = s

    self.x = None
    self.n = 2
    self.domain = [(0, up_rg), (0, up_io)]
    self.optimizer = de.differential_evolution_optimizer(
        self,
        monitor_cycle=100,
        max_iter=1000000,
        population_size=20,
        n_cross=3,
        show_progress_nth_cycle=500,
        show_progress=False,
        f=0.95,
        eps=1e-18)
示例#12
0
  def __init__(self, fixed, moving):
    """Align *moving* to *fixed* by DE over three rotation angles and
    three translation components."""
    self.fixed = fixed
    self.moving = moving

    self.nsde = nsd_engine(self.fixed)

    # Characteristic distances of both models set the translation bound.
    self.d_fixed = smath.sqrt(self.nsde.d_fixed)
    self.d_moving = smath.sqrt(self.nsde.get_mean_distance(self.moving))

    self.m_com = self.moving.mean()
    self.f_com = self.fixed.mean()
    self.n_mov = self.moving - self.m_com

    self.d = (self.d_fixed + self.d_moving) / 12

    self.n = 6
    angle = (-smath.pi, smath.pi)
    shift = (-self.d, self.d)
    self.domain = [angle] * 3 + [shift] * 3
    self.x = None
    self.optimizer = de.differential_evolution_optimizer(
        self,
        population_size=12,
        f=0.85,
        cr=0.95,
        n_cross=2,
        eps=1e-2,
        show_progress=False,
        show_progress_nth_cycle=20)
示例#13
0
    def __init__(self,
                 means,
                 variances,
                 reference_id=0,
                 init_mean=1.0,
                 spread=0.1,
                 factor=50,
                 f=0.7,
                 eps=1e-12,
                 out=None,
                 show_progress=False,
                 insert_solution_vector=None,
                 add=True):
        """Scale several (mean, variance) data sets onto a reference set
        using a differential-evolution search.

        Parameters
        ----------
        means, variances : per-set arrays of means and variances.
        reference_id : index of the set the others are scaled onto.
        init_mean, spread : centre and half-width of each search interval.
        factor : population-size multiplier (population = n * factor).
        f, eps : DE mutation factor and convergence tolerance.
        out : output stream; defaults to sys.stdout when None.
        show_progress, insert_solution_vector : forwarded to the optimizer.
        add : additive-offset flag consumed by the target function.
        """
        # BUG FIX: the original assigned None unconditionally, silently
        # discarding a caller-supplied ``out`` stream.
        self.out = out
        if self.out is None:
            self.out = sys.stdout

        self.add = add

        self.means = means
        self.vars = variances
        self.ref = reference_id
        self.n_sets = len(self.means)
        self.map = self.setup_coeff_map()

        # Two parameters (scale and offset) for every non-reference set.
        self.n = (len(self.means) - 1) * 2
        self.x = None
        self.domain = [(-spread + init_mean, spread + init_mean)] * self.n
        self.optimizer = de.differential_evolution_optimizer(
            self,
            population_size=self.n * factor,
            show_progress=show_progress,
            eps=eps,
            f=f,
            n_cross=2,
            cr=0.8,
            insert_solution_vector=insert_solution_vector)
示例#14
0
    def __init__(self,
                 n_params,
                 n_fst_pass,
                 d_max,
                 data,
                 n_int=35,
                 simplex_trial=5):
        """Two-stage P(r) fit: a differential-evolution global search over
        the first n_fst_pass coefficients, followed by repeated
        random-restart simplex refinement over all n_params coefficients.

        NOTE(review): the ``d_max`` argument is never read -- self.d_max is
        recomputed from the data below; confirm whether that is intended.
        """
        self.n = n_fst_pass
        self.n_coeff = n_params
        # Number of extra coefficients added for the refinement stage.
        self.delta = self.n_coeff - self.n
        self.data = data
        # NOTE(review): overrides the d_max parameter with max(q) of the data.
        self.d_max = max(self.data.q)
        self.n_int = n_int
        self.x = None

        self.ent = entropic_restraints.entropy_restraint()

        # make a pofr please
        self.pofr = pr_tools.pofr(self.d_max,
                                  self.n,
                                  n_int=self.n_int,
                                  m_int=self.n_int)

        # During the global search, weight only the low-q region (q < 0.1).
        self.q_weight = flex.bool(self.data.q < 0.1).as_double()

        self.weight = 1.0
        # first we do a global optimisation using a diferentail evolution search
        self.domain = []
        for ii in range(self.n):
            self.domain.append((-0.1, 0.1))
        self.optimizer = de.differential_evolution_optimizer(
            self,
            population_size=self.n,
            n_cross=1,
            f=0.85,
            eps=1e-5,
            monitor_cycle=50,
            show_progress=False)
        # Refinement stage: weight all q points equally ...
        self.q_weight = self.q_weight * 0.0 + 1.0
        # ... and extend the DE solution with zeros for the new coefficients.
        self.x = self.x.concatenate(flex.double([0] * self.delta))
        self.n = self.n + self.delta
        self.pofr = pr_tools.pofr(self.d_max,
                                  self.n,
                                  n_int=self.n_int,
                                  m_int=self.n_int)
        self.simplex_trial = simplex_trial
        self.simplex_scores = []
        self.simplex_solutions = []
        # NOTE(review): the inner loop below reuses ``ii``, shadowing the
        # trial index; harmless here since the outer ii is not read again.
        for ii in xrange(self.simplex_trial):
            #make a random simplex please
            self.weight = 1.0
            self.starting_simplex = []
            # Random vertices within +/-0.10 of the current solution.
            for ii in range(self.n + 1):
                self.starting_simplex.append(
                    0.10 * (flex.random_double(self.n) * 2 - 1.0) + self.x)

            self.optimizer = simplex.simplex_opt(dimension=self.n,
                                                 matrix=self.starting_simplex,
                                                 evaluator=self,
                                                 tolerance=1e-3)
            self.solution = self.optimizer.get_solution()
            self.score = self.target(self.solution)
            self.simplex_scores.append(self.score)
            self.simplex_solutions.append(self.solution)

        # Keep the best-scoring (lowest-target) simplex solution.
        best_simplex_score = self.simplex_scores[0]
        this_simplex = 0
        for ii in xrange(self.simplex_trial):
            if self.simplex_scores[ii] < best_simplex_score:
                best_simplex_score = self.simplex_scores[ii]
                this_simplex = ii

        self.solution = self.simplex_solutions[this_simplex]

        #self.optimizer = simulated_annealing.sa_optimizer( self, self.solution, flex.double( self.n*[0.0051] ), start_t=2.1, end_t=0.001, burn_in=100, burn_out=50000, steps=5000 , show_progress=True)
        #self.solution, self.score = self.optimizer.get_solution()
        self.pofr.update(self.solution)

        # Model intensities evaluated at the experimental q values.
        self.calc_data = self.pofr.f(self.data.q)