def genetic_double_gaussian_fit(im, verbose=False) :
    """Fit double gaussian profiles to the each of the spectra in imcube im.

    old, dont use -- kept for reference; superseded by later fitters.

    Model per spectrum: a1*G(m1,s1) + a2*G(m2,s2) + c, i.e. two gaussians on a
    constant background.  Every spaxel is fitted independently: two separate
    genetic-optimiser runs are performed and their candidate pools merged for
    a final refinement pass.

    Args (assumed from usage -- confirm against callers):
        im: 3-D cube with the spectral axis first, shape (nchan, ny, nx).
        verbose: when False, stdout is redirected to os.devnull to silence
            the optimiser's output for each row.

    Returns:
        res: array of shape (7, ny, nx) holding, per spaxel, the best-fit
        parameter vector (a1, s1, m1, a2, s2, m2, c).
    """
    shape=im.shape
    X=np.arange(float(shape[0]))         # channel coordinate used by the model
    res=np.empty((7,shape[1],shape[2]))  # fitted parameters per spaxel
    for i in xrange(shape[1]):
        # f=open('log','a')
        # f.write(str(i)+'\n')
        # f.close()
        print i                          # progress: current row index
        if not(verbose):
            # Silence the optimiser for this whole row; the devnull handle is
            # closed and stdout restored after the inner loop finishes.
            actualstdout = sys.stdout
            sys.stdout = open(os.devnull,'w')
        for j in xrange(shape[2]):
            d=im[:,i,j]                  # the spectrum currently being fitted
            c=sorted(d)[shape[0]/2]      # median as background estimate (Py2 integer division)
            peak =d.argmax()
            trough=d.argmin()
            mx,mn=d[peak]-c,d[trough]-c  # extreme amplitudes relative to the background
            def cons ():
                # Random constructor for one candidate parameter vector
                # (a1, s1, m1, a2, s2, m2, c).  One of three seeding
                # strategies is chosen uniformly at random: both components
                # near the peak, one at the peak and one at the trough, or
                # both at the trough.
                r=rndint(0,2)
                if r==0:
                    return np.array((nv(mx,mx/4), rnd()*15, nv(peak,10),
                                     nv(-mx,mx/4)*-10,rnd()*15, nv(peak,10),
                                     c*nv(1, 0.1)))
                elif r==1:
                    return np.array((nv(mx,mx/4)*10, rnd()*15, nv(peak,10),
                                     nv(mn,mn/4)*-10,rnd()*15, nv(trough,10),
                                     c*nv(1, 0.1)))
                else :
                    return np.array((nv(mn,mn/4)*10, rnd()*15, nv(trough,10),
                                     nv(mn,mn/4)*-10,rnd()*15, nv(trough,10),
                                     c*nv(1, 0.1)))
            def foo(params):
                #inlined gauss2 (this is the intermost part of the optimisation)
                # Sum of squared residuals of the double-gaussian model against
                # spectrum d, written with in-place ops to avoid temporaries.
                # NOTE: the unpacked local c shadows the background median above.
                a1,s1,m1,a2,s2,m2,c=params
                func1=np.exp(-(X-m1)**2/(2*s1*s1))
                func2=np.exp(-(X-m2)**2/(2*s2*s2))
                func1*=a1; func2*=a2; func1+=func2; func1+=c; func1-=d; func1*=func1
                # blitz('func1=func1*a1+func2*a2+c')#not working for some reason
                return func1.sum()
            # Two independent genetic runs; satisfactory=0.01*251 looks like a
            # mean squared residual of 0.01 over 251 channels -- TODO confirm
            # that nchan is 251 for the cubes this was written for.
            s=g.optimize(foo, cons, satisfactory=0.01*251, tolerance=0.001, its=100, hillWalks=2, verbose=verbose)
            r=g.optimize(foo, cons, satisfactory=0.01*251, tolerance=0.001, its=100, hillWalks=2, verbose=verbose)
            s.run()
            r.run()
            # Merge both pools and refine to a tighter tolerance.
            final=g.optimize(foo, cons, satisfactory=1.0, tolerance=0.0001, pool=s.pool+r.pool)
            final.run()
            res[:,i,j]=final.pool[0][1:]  # pool[0][0] is the score; [1:] are the parameters
        if not(verbose):
            sys.stdout.close()            # close the devnull handle
            sys.stdout = actualstdout
    return res
def optimize(players, obj_func, initial_guess=None, verbose=False, constrained_players=None):
    """ Meta optimization using various underlying optimizers """
    # Seed pool: every intermediate result is fed into the final GP run so
    # the large genetic pass starts with better diversity.
    seed_pool = []
    if initial_guess is not None:
        seed_pool.append(initial_guess)

    # Stage 1: a small genetic pass to find a reasonable starting point.
    best_team, _ = TOG.optimize(5, players, obj_func=obj_func, num_evolutions=100,
                                constrained_players=constrained_players,
                                include_individuals=seed_pool)
    seed_pool.append(copy.deepcopy(best_team))
    score = obj_func(best_team)
    if verbose:
        print("Results from initial genetic opt: {}".format(score))

    # Stage 2: anneal the genetic result.
    best_team = TOA.optimize(best_team, players, obj_func=obj_func,
                             constrained_players=constrained_players)
    seed_pool.append(copy.deepcopy(best_team))
    score = obj_func(best_team)
    if verbose:
        print("Results after annealing: {}".format(score))

    # Stage 3: myopic (greedy local) refinement.
    best_team = TOM.optimize(best_team, players, obj_func=obj_func,
                             constrained_players=constrained_players)
    seed_pool.append(copy.deepcopy(best_team))
    score = obj_func(best_team)

    # Stage 4: a large genetic pass seeded with everything found so far.
    best_team, _ = TOG.optimize(50, players, obj_func=obj_func,
                                include_individuals=seed_pool,
                                constrained_players=constrained_players)
    score = obj_func(best_team)
    if verbose:
        print("Results after 2nd pass genetic opt: {}".format(score))

    # Stage 5: one last myopic polish.
    best_team = TOM.optimize(best_team, players, obj_func=obj_func,
                             constrained_players=constrained_players)
    best_obj = obj_func(best_team)
    if verbose:
        print("Final result: {}".format(best_obj))
    return best_team, best_obj
def mainfoo(im) : shape=im.shape X=arange(shape[0]) res=empty((7,shape[1],shape[2])) for i in xrange(shape[1]): f=open('log','a') f.write(str(i)+'\n') f.close() print i for j in xrange(shape[2]): c=sorted(im[:,i,j])[shape[0]/2] peak=im[:,i,j].argmax() def cons (): return array((random()*10,random()*10,random()*10-20+peak, random()*-10,random()*10,random()*20-10+peak,c*(random()*0.2+0.9))) def foo(params): d=im[:,i,j] return ((gauss2(X, params)-d)**2).sum() s=g.optimize(foo, cons, satisfactory=0.01, verbose=False) s.run() res[:,i,j]=s.pool[0][1:] return res
def optimize_guest_seating(guestsCsvFile, seatsCsvFile,
                           population_size=10,
                           num_generations=100,
                           mutation_rate=0.3):
    """Optimize guest seating trying to maximize sum of their preferences

    Args:
        guestsCsvFile (file): csv containig matrix of guest preferences
            (the bigger the number the more they want to sit near)
        seatsCsvFile (file): csv containing matrix representing layout of
            tables
        population_size (int): number of random seatings in the initial
            generation (default 10, as before)
        num_generations (int): number of generations to evolve (default 100)
        mutation_rate (float): swap-mutation probability (default 0.3)

    Returns:
        list((int, string)): list of pairs (seat_id, guest_name)
    """
    # pd.read_table is deprecated for csv input; read_csv is the supported API.
    guests = pd.read_csv(guestsCsvFile, index_col=0)
    guest_names = guests.columns.values
    seats = pd.read_csv(seatsCsvFile, index_col=0, dtype=int)

    # Each chromosome is a permutation mapping seat index -> guest index.
    first_generation = [
        np.random.permutation(len(guest_names)) for _ in range(population_size)
    ]

    def fitness_score(chromosome):
        # Sum preference(x, y) weighted by how close seats i and j are.
        # NOTE(review): seat weights are read as seats[str(i)][j], i.e.
        # column label str(i), row label j -- presumably the seat layout csv
        # is a symmetric closeness matrix; confirm with the data files.
        score = 0
        for i, x in enumerate(chromosome):
            for j, y in enumerate(chromosome):
                score += guests[guest_names[x]][guest_names[y]] * seats[str(
                    i)][j]
        return score

    seating, score = genetic.optimize(
        first_generation,
        genetic.selections.elite_selection,
        genetic.crossovers.order_crossover,
        genetic.mutations.swap_mutation,
        fitness_score,
        num_generations,
        mutation_rate,
    )
    _logger.info("Resulting score: %s", score)
    return [(i, guest_names[seating[i]]) for i in range(len(seating))]
def double_gaussian_fit_wCentral(im, verbose=False) :
    """Fit double gaussian profiles to the each of the spectra in imcube im.

    Variant that first linearly interpolates across the central channels
    (centre-8 .. centre+8) of each spectrum -- presumably to mask a
    contaminated central feature before locating the peak/trough (confirm
    against callers).  A scipy leastsq fit is tried first; if it does not
    converge the fit falls back to the genetic optimiser ``g.optimize``.

    Args (assumed from usage -- confirm against callers):
        im: 3-D cube with the spectral axis first, shape (nchan, ny, nx).
        verbose: when True, plot diagnostics per spaxel; when False,
            suppress the genetic optimiser's stdout output.

    Returns:
        res: (7, ny, nx) array of (a1, s1, m1, a2, s2, m2, c) per spaxel;
        NaN where the genetic fallback also failed to reach a good score.
    """
    shape=im.shape
    X=np.arange(float(shape[0]))         # channel coordinate used by the model
    res=np.empty((7,shape[1],shape[2]))  # fitted parameters per spaxel
    temp=np.empty(X.shape)               # scratch copy of the current spectrum
    for i in xrange(shape[1]):
        # f=open('log','a')
        # f.write(str(i)+'\n')
        # f.close()
        # def blah (i):
        print i                          # progress: current row index
        for j in xrange(shape[2]):
            d=im[:,i,j]                  # the spectrum currently being fitted
            c=sorted(d)[shape[0]/2]      # median as background estimate (Py2 integer division)
            temp=d.copy()
            # Replace the 17 central channels by a straight line between the
            # endpoints so the central feature does not dominate the
            # peak/trough search below.
            centre=shape[0]/2-8,shape[0]/2+9
            temp[centre[0]:centre[1]]=[temp[centre[0]]*(1-x/16.)+temp[centre[1]]*x/16. for x in xrange (17)]
            peak =temp.argmax()
            trough=temp.argmin()
            mx,mn=temp[peak]-c,temp[trough]-c  # extreme amplitudes relative to background
            def cons ():
                # Random constructor for one candidate parameter vector
                # (a1, s1, m1, a2, s2, m2, c); one of three seeding
                # strategies is chosen uniformly at random.
                r=rndint(0,2)
                if r==0:
                    return np.array((nv(mx,mx/4), rnd()*15, nv(peak,10),
                                     nv(-mx,mx/4)*-10,rnd()*15, nv(peak,10),
                                     c*nv(1, 0.1)))
                elif r==1:
                    return np.array((nv(mx,mx/4)*10, rnd()*15, nv(peak,10),
                                     nv(mn,mn/4)*-10,rnd()*15, nv(trough,10),
                                     c*nv(1, 0.1)))
                else :
                    return np.array((nv(mn,mn/4)*10, rnd()*15, nv(trough,10),
                                     nv(mn,mn/4)*-10,rnd()*15, nv(trough,10),
                                     c*nv(1, 0.1)))
            def err(params,x,y):
                # Residual vector for leastsq: double gaussian plus the FIXED
                # background c from the enclosing scope -- only six parameters
                # are free here; c is appended to the result row further down.
                a1,s1,m1,a2,s2,m2=params
                func1=np.exp(-(x-m1)**2/(2*s1*s1))
                func2=np.exp(-(x-m2)**2/(2*s2*s2))
                func1*=a1; func2*=a2; func1+=func2; func1+=c; func1-=y;
                return func1
            # First attempt: Levenberg-Marquardt on the interpolated spectrum,
            # seeded from the (original-spectrum) peak/trough amplitudes.
            v,success=optimize.leastsq(err, (d[peak]-c, 10, peak , d[trough]-d[peak], 10, trough), args=(X,temp))
            if success!=1:
                def foo(params):
                    #inlined gauss2 (this is the intermost part of the optimisation)
                    # Sum of squared residuals against the ORIGINAL spectrum d;
                    # the unpacked local c shadows the background median.
                    a1,s1,m1,a2,s2,m2,c=params
                    func1=np.exp(-(X-m1)**2/(2*s1*s1))
                    func2=np.exp(-(X-m2)**2/(2*s2*s2))
                    func1*=a1; func2*=a2; func1+=func2; func1+=c; func1-=d; func1*=func1
                    return func1.sum()
                if not(verbose):
                    # Silence the optimiser for this spaxel; restored below.
                    actualstdout = sys.stdout
                    sys.stdout = open(os.devnull,'w')
                #if leastsq doesnt give a good fit then try to fit with a genetic alg
                s=g.optimize(foo, cons, satisfactory=2.51, tolerance=0.001, its=100, hillWalks=2, verbose=verbose, startWalk=True)
                r=g.optimize(foo, cons, satisfactory=2.51, tolerance=0.001, its=100, hillWalks=2, verbose=verbose, startWalk=True)
                s.run()
                r.run()
                # Merge both pools and refine to a tighter tolerance.
                final=g.optimize(foo, cons,
                                 satisfactory=1.0, tolerance=0.0001, pool=s.pool+r.pool, finalWalk=True)
                final.run()
                if not(verbose):
                    sys.stdout.close()   # close the devnull handle
                    sys.stdout = actualstdout
                if final.pool[0][0]<=1.5:
                    res[:,i,j]=final.pool[0][1:]  # pool[0][0] is the score; [1:] the parameters
                else:
                    res[:,i,j]=np.nan    # even the genetic fallback failed; mark spaxel invalid
            else :
                # leastsq converged: copy its six parameters and append the
                # fixed background estimate as the seventh.
                for k in xrange(len (v)):
                    res[k,i,j]=v[k]
                res[ -1,i,j]=c
            if verbose :
                # Diagnostic plots: raw spectrum, interpolated spectrum and
                # the fitted model (251 channels assumed here -- TODO confirm).
                plot(d)
                plot (temp)
                plot (gauss2(np.arange(251.), res[:,i,j]))
    return res