Example #1
    def update(self, psObj, block):

        if self.growFreq is not None:
            if psObj.k % self.growFreq == 0:
                # time to grow the stepsize
                upper_bound = (1 + self.alpha * self.eta) * self.steps[block]
                desired_step = self.growFac * self.steps[block]
                self.steps[block] = min(upper_bound, desired_step)

        psObj.embedded.setStep(self.steps[block])

        thisSlice = psObj.partition[block]

        phi = (psObj.Hz - psObj.xdata[block]).T.dot(psObj.ydata[block] -
                                                    psObj.wdata[block])

        xold = npcopy(psObj.xdata[block])
        yold = npcopy(psObj.ydata[block])

        t1 = (1 - self.alpha) * xold + self.alpha * psObj.Hz
        t2 = npcopy(self.gradxdata[block])
        t2 -= psObj.wdata[block]
        # backtracking: shrink the step until both acceptance tests below pass
        while True:
            t = t1 - self.steps[block] * t2
            # prox applied to all but the first coordinate; the first passes through
            psObj.xdata[block][1:] = psObj.embedded.getProx(t[1:])
            psObj.xdata[block][0] = t[0]

            self.gradxdata[block] = self._getAGrad(psObj, psObj.xdata[block],
                                                   thisSlice)
            psObj.ydata[block] = self.steps[block]**(-1) * (
                t - psObj.xdata[block]) + self.gradxdata[block]

            yhat = self.steps[block]**(-1) * (
                (1 - self.alpha) * xold + self.alpha * psObj.Hz - psObj.xdata[block]) \
                + psObj.wdata[block]
            phiPlus = (psObj.Hz -
                       psObj.xdata[block]).T.dot(psObj.ydata[block] -
                                                 psObj.wdata[block])

            lhs1 = norm(psObj.xdata[block] - self.thetahat[block], 2)
            rhs1 = (1 - self.alpha) * norm(xold - self.thetahat[block], 2) \
                + self.alpha * norm(psObj.Hz - self.thetahat[block], 2) \
                + self.steps[block] * norm(psObj.wdata[block] - self.what[block], 2)
            if lhs1 <= rhs1:
                numer = norm(yhat - psObj.wdata[block], 2)**2
                denom = norm(psObj.ydata[block] - psObj.wdata[block], 2)**2
                rhs2_1 = 0.5 * (self.steps[block] /
                                self.alpha) * (denom + self.alpha * numer)

                rhs2_2 = (1 -
                          self.alpha) * (phi - 0.5 *
                                         (self.steps[block] / self.alpha) *
                                         norm(yold - psObj.wdata[block], 2)**2)

                if phiPlus >= rhs2_1 + rhs2_2:
                    #backtracking termination criteria satisfied
                    self.eta = numer / denom
                    break

            self.steps[block] *= self.delta
            psObj.embedded.setStep(self.steps[block])
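
These snippets call NumPy through short aliases (npcopy, norm, nparray, npabs, FFT, ...) whose import lines were cut off with the surrounding modules. A minimal sketch of the aliasing they appear to assume; the originals' exact imports are not shown, so treat this as a plausible reconstruction:

from numpy import copy as npcopy             # npcopy(a): independent copy of array a
from numpy import array as nparray, abs as npabs
from numpy.linalg import norm                # vector norms used in the backtracking tests
from numpy.fft import fft as FFT, ifft as IFFT, fftfreq as FREQ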
Example #2
def grad(step,x,f,params0,paramsFixed,nKtargets,maindir,poscarsDir,vaspinputdir):
    '''Advance each component of x separately and compute each 
    cost to estimate the gradient. These run in parallel.'''
    jobIDs = []
    f1arr = zeros(len(x),dtype=float)
    gradfactor = (1+step)
    x1 = x * gradfactor
    dx = x1-x
    for i in range(len(x)):
        xtemp = npcopy(x)
        xtemp[i] = x1[i] #change only one parameter at a time
        jobIDs = submit(i,jobIDs,append(multiply(xtemp,params0),paramsFixed),nKtargets,maindir,poscarsDir,vaspinputdir)
    waitJobs(jobIDs)
    gcosts = []
    print()
    for i in range(len(x)):
        pdir = '{}/p{}'.format(maindir, i)
        [cost, avgnDone] = analyzeNks.analyze([pdir])
        f1arr[i] = cost
        gcosts.append(cost)
        print('gcost {}: {}'.format(i, f1arr[i]))
    gr = divide(f1arr - f, dx)  # forward difference: (f(x_i + dx_i) - f(x)) / dx_i
    print('grad', gr)
    minGradcost = min(gcosts)
    if minGradcost < 0.95*f: #use the corresponding x in the search
        imin = argmin(gcosts)
        xnew = npcopy(x)
        xnew[imin] = x1[imin]
        returnList = [xnew,minGradcost]
    else:
        returnList = None
    return gr, returnList
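
grad() is a one-sided finite difference: each component is scaled by (1 + step), the cost is re-evaluated, and the slope is (f1 - f)/dx. A self-contained toy version of the same scheme, with the cluster-submission machinery stripped out (the quadratic cost is purely illustrative):

import numpy as np

def fd_grad(cost, x, step=0.1):
    '''Forward-difference gradient, perturbing one component at a time.'''
    f = cost(x)
    g = np.zeros(len(x))
    for i in range(len(x)):
        xtemp = np.copy(x)
        xtemp[i] = x[i] * (1 + step)   # relative perturbation, as in grad() above
        # note: breaks if x[i] == 0, a caveat shared with grad()
        g[i] = (cost(xtemp) - f) / (xtemp[i] - x[i])
    return g

print(fd_grad(lambda v: (v**2).sum(), np.array([1.0, 2.0])))   # ~[2.1, 4.2] for f = |v|^2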
Example #3
    def get_landmark_probs_for_points(self, landmarks, points, xs, ys, x, y):

        def get_probabilities(landmark):
            epsilon = 0.02
            distances = array([ landmark.distance_to_point(point)
                if not (isinstance(landmark.representation,RectangleRepresentation) and landmark.representation.contains_point(point))
                else 9*epsilon for point in points])
            # distances = array([ landmark.distance_to_point(point)
            #     if not (isinstance(landmark.representation,RectangleRepresentation) and landmark.representation.contains_point(point))
            #     else min(poly_to_vec_distance(landmark.representation.get_geometry().to_polygon(),point),landmark.representation.middle.distance_to(point))
            #     for point in points])
            # scores = 1.0/(distances + epsilon)**0.5
            std = .1
            scores = exp( -(distances/std)**2)
            # if scores.sum() != 0:
            # print landmark, scores.sum(), max(scores)
            return scores/scores.sum()
            # else: return scores

        sum_probs = None
        prob_lists = []
        original_probs = []

        syms = ['\\', '|', '/', '-']

        for i, landmark in enumerate(landmarks):
            probs = get_probabilities(landmark)
            original_probs.append( npcopy(probs) )
            # probabilities = probs.reshape( (len(xs),len(ys)) ).T
            # plt.pcolor(x, y, probabilities, cmap = 'jet', edgecolors='none', alpha=0.7)
            # plt.colorbar()
            # plt.title(str(landmark)+" before")
            # plt.show()

            # print landmark, probs.sum(), max(probs)
            if sum_probs is None: sum_probs = npcopy(probs)
            else: sum_probs += probs
            prob_lists.append( probs )
            sys.stdout.write("\b%s" % syms[i % len(syms)])
            sys.stdout.flush()

        for lmk,probs in zip(landmarks, prob_lists):

            # print
            # probabilities = probs.reshape( (len(xs),len(ys)) ).T
            # plt.pcolor(x, y, probabilities, cmap = 'jet', edgecolors='none', alpha=0.7)
            # plt.colorbar()
            # plt.title(str(lmk)+" before")
            # plt.show()

            probs /= sum_probs

            # probabilities = probs.reshape( (len(xs),len(ys)) ).T
            # plt.pcolor(x, y, probabilities, cmap = 'jet', edgecolors='none', alpha=0.7)
            # plt.colorbar()
            # plt.title(str(lmk)+" after")
            # plt.show()

        return prob_lists, original_probs
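
get_probabilities turns distances into a Gaussian kernel score and normalizes per landmark; the caller then renormalizes across landmarks so that at every point the landmark probabilities sum to 1. A vectorized sketch of that double normalization:

import numpy as np

def landmark_point_probs(dist_rows, std=0.1):
    '''dist_rows[k][j]: distance from landmark k to point j.
    Returns rows summing to 1 across landmarks at every point.'''
    scores = np.exp(-(np.asarray(dist_rows) / std) ** 2)    # Gaussian kernel on distance
    per_lmk = scores / scores.sum(axis=1, keepdims=True)    # normalize over points, per landmark
    return per_lmk / per_lmk.sum(axis=0, keepdims=True)     # renormalize across landmarks

probs = landmark_point_probs([[0.0, 0.1, 0.5], [0.5, 0.1, 0.0]])
print(probs.sum(axis=0))    # -> [1. 1. 1.]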
Example #4
    def get_relation_probs_for_points(self, points, landmark, landmark_heatmap, original_landmark_heatmap, perspective):
        def instantiate_relations(landmark):

            bullshit_trajector = Landmark(None, PointRepresentation(Vec2(0, 0)), None)
            relations = []
            for rel in DistanceRelationSet.relations:
                for dist_class, deg_class in list(product([Measurement.NEAR, Measurement.FAR], Degree.all)):
                    relation = rel(perspective, landmark, bullshit_trajector)
                    relation.measurement.best_distance_class = dist_class
                    relation.measurement.best_degree_class = deg_class
                    relations.append(relation)

            for rel in ContainmentRelationSet.relations:
                relation = rel(perspective, landmark, bullshit_trajector)
                relations.append(relation)

            for rel in OrientationRelationSet.relations:
                for dist_class, deg_class in list(product([Measurement.FAR], Degree.all)) + [
                    (Measurement.NONE, Degree.NONE)
                ]:
                    relation = rel(perspective, landmark, bullshit_trajector)
                    relation.measurement.best_distance_class = dist_class
                    relation.measurement.best_degree_class = deg_class
                    relations.append(relation)
            return relations

        syms = ["\\", "|", "/", "-"]

        relations = instantiate_relations(landmark)
        rel_points_probs = []
        original_probs = []
        sum_probs = None
        for i, relation in enumerate(relations):
            probs = self.get_probabilities_points(points, relation, None, None)
            if probs.sum() != 0:
                probs /= probs.sum()
            original_probs.append(npcopy(probs))
            if sum_probs is None:
                sum_probs = npcopy(probs)
            else:
                sum_probs += probs
            rel_points_probs.append(probs)
            sys.stdout.write("\b%s" % syms[i % len(syms)])
            sys.stdout.flush()

        # normalize across relations
        for probs in rel_points_probs:
            probs /= sum_probs
            probs *= landmark_heatmap

        # TODO treat ori_relations differently

        return relations, list(zip(rel_points_probs, original_probs))
Example #5
def randSteps(allRands,step,x,params0,paramsFixed,nKtargets,maindir,poscarsDir,vaspinputdir):
    '''Take random steps: scale every component of x by (1 +/- step) with random signs,
    rejecting candidates already in allRands. Return costs and x's. These run in parallel.'''
    jobIDs = []
    xs = []
#     for i in range(len(x)):
#         xtemp = npcopy(x)
#         xtemp[i] *= (1 + sign(uniform(-1,1)) * step )#change only one parameter at a time
#         xs.append(xtemp)
    for i in range(len(x)): #change all parameters with random sign by step
        for itry in range(1000):
            xtemp = npcopy(x)
            for j in range(len(x)):
                xtemp[j] *= (1 + sign(uniform(-1,1)) * step) # flip each parameter up or down by step
            xsStr = '['
            for xi in xtemp:
                xsStr += ' {:6.2f}'.format(xi)
            if xsStr not in allRands:
                break #this one is unique
        xs.append(xtemp)
        jobIDs = submit(i,jobIDs,append(multiply(xtemp,params0),paramsFixed),nKtargets,maindir,poscarsDir,vaspinputdir)
    waitJobs(jobIDs)
    rcosts = []
    for i in range(len(x)):
        pdir = '{}/p{}'.format(maindir,i)
        [cost,avgnDone] = analyzeNks.analyze([pdir])
        rcosts.append(cost) 
    return rcosts,xs
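
The itry loop is rejection sampling for uniqueness: regenerate the random +/-step candidate until its formatted signature has not been seen in allRands. A compact sketch of the same idea with vectorized sign flips (allRands is assumed to be the caller-maintained collection of signatures already tried):

import numpy as np

def unique_rand_step(x, step, allRands, max_tries=1000):
    '''Scale every component of x by (1 +/- step) until the signature is new.'''
    for _ in range(max_tries):
        signs = np.sign(np.random.uniform(-1, 1, size=len(x)))
        xtemp = x * (1 + signs * step)
        sig = ''.join(' {:6.2f}'.format(xi) for xi in xtemp)
        if sig not in allRands:
            allRands.add(sig)    # record it so later draws avoid repeats
            return xtemp
    return xtemp                 # fall back to the last candidate, as randSteps does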
Example #6
def modIncar(newdir):
    lines = readfile('{}/INCAR'.format(newdir))
    plines = readfile('{}/POTCAR'.format(newdir))
    for line in plines:
        if 'ENMAX' in line:
            # POTCAR line looks like "ENMAX  =  xxx.xxx; ENMIN = ...": take 1.4x the cutoff
            enmax = 1.4*float(line.split()[2].replace(';',''))
            break
    for i,line in enumerate(npcopy(lines)):
        if 'ENMAX' in line:
            lines[i] = 'ENMAX = {}\n'.format(enmax)
            break
    writefile(lines,'{}/INCAR'.format(newdir))     
    return
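
modIncar leans on readfile/writefile helpers that are not shown. Sketches consistent with how they are called here (readfile returns the lines with newlines kept; writefile takes the line list first, then the path); the project's real versions may differ:

def readfile(path):
    '''Return the file's lines, newlines included, as modIncar expects.'''
    with open(path) as f:
        return f.readlines()

def writefile(lines, path):
    '''Write a list of lines back out; note the (lines, path) argument order used above.'''
    with open(path, 'w') as f:
        f.writelines(lines)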
Example #7
    def make_spectra_plot(self,
                          ax=None,
                          add_current_fig=False,
                          header=None,
                          no_return=False,
                          xscale='log',
                          show_now=False):
        """Class methods are similar to regular functions.
        Note:
                Do not include the `self` parameter in the ``Args`` section.
        Args:
                param1: The first parameter.
                param2: The second parameter.
        Returns:
                True if successful, False otherwise.
        """
        import matplotlib.pyplot as plt

        if self.verbose:
            info_message('Creating Planetary Spectral Plot for {}'.format(
                self.input_planet_name))

        if ax is None:
            ax = plt.gcf().get_axes()[0] \
                if add_current_fig else plt.figure().add_subplot(111)

        if header is None:
            header = list(npcopy(self.header))

        if len(header) == 4:
            # assume same order as self.header:
            header.remove(header[1])

        if self.planetary_spectra_table is None:
            self.get_spectra()

        ax.errorbar(self.planetary_spectra_table[header[0]].values,
                    self.planetary_spectra_table[header[1]].values,
                    self.planetary_spectra_table[header[2]].values,
                    fmt='o')

        ax.set_xscale(xscale)

        if show_now:
            plt.show()

        if not no_return:
            return ax
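
The ax selection above packs three cases into one conditional expression. An equivalent, more explicit sketch (pure matplotlib):

import matplotlib.pyplot as plt

def pick_axis(ax=None, add_current_fig=False):
    '''Keep a given axis, reuse the current figure's first axis, or make a new one.'''
    if ax is not None:
        return ax
    if add_current_fig:
        return plt.gcf().get_axes()[0]    # assumes the current figure already has an axis
    return plt.figure().add_subplot(111)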
Example #8
from numpy import asarray, ones, copy as npcopy
from .Regions import Regions

__all__ = [
    'spm', 'mph', 'hpd', 'mpy', 'dpy_noleap', 'dpy_gregorian', 'dpy_360',
    'dpm_noleap', 'dpm_gregorian', 'dpm_360', 'g_per_Pg', 'g_per_kg',
    'Ar_molar_mass', 'C_molar_mass', 'N_molar_mass', 'O_molar_mass',
    'CO2_molar_mass', 'dry_air_molar_mass', 'dry_air_mass', 'dry_air_moles',
    'co2_g_per_ppm', 'co2_ppm_per_kg', 'co2_ppm_per_C_Pg', 'regions',
    'NCARclrs', 'NCARcmap', 'NCARnorm', 'region_names', 'dpy', 'mid_months',
    'spd', 'spy'
]

# Time constants
spm = 60.  # seconds per minute
mph = 60.  # minutes per hour
hpd = 24.  # hours per day
spd = spm * mph * hpd
spy = spd * 365.
mpy = 12.  # months per year
dpy_noleap = 365.0  # days per year (for no leap year calendars)
dpy_gregorian = 365.25  # days per year
dpy_360 = 360.0  # days per year (for 30 days/month)
dpm_noleap = asarray([31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31],
                     dtype='float')  # days per month
dpm_gregorian = npcopy(dpm_noleap)
dpm_gregorian[1] = dpm_gregorian[1] + 0.25
dpm_360 = ones(int(mpy)) * 30.
mid_months = asarray([
    15.5, 45., 74.5, 105., 135.5, 166., 196.5, 227.5, 258., 288.5, 319., 349.5
],
                     dtype='float')
lbl_months = [
    "Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct",
    "Nov", "Dec"
]
bnd_months = asarray(
    [0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334, 365], dtype='int')

dpy = {
    "noleap": dpy_noleap,
    "365_day": dpy_noleap,
    "360_day": dpy_360,
    "gregorian": dpy_gregorian,
    "proleptic_gregorian": dpy_gregorian
}
Example #9
def fillimpulseresponses(printfiles=True, samplefiles=False):
    (s_collection_ft,n_collection_ft) = (nparray([0,0,0],dtype=complex),nparray([0,0,0],dtype=complex))
    filepath = '../data_fs/ave1/'
    filematch = filepath + 'C1--LowPulseHighRes-in-100-out1700-an2100--*.txt'
    filelist = glob.glob(filematch)


    print('filling impulse response files\n\tnum files = %i' % len(filelist))

    for i,f in enumerate(filelist):

        ## processing images 
        ## samplefiles = False
        m = re.search('(.+).txt$',f)
        if (i%10 == 0 and samplefiles):
            outname_spect = m.group(1) + '.spect.dat'
            outname_time = m.group(1) + '.time.dat'
            outname_simTOF = m.group(1) + '.simTOF.dat'

        fi = open(f, "r")
        for passline in range(6):
            headline = '# ' + fi.readline()
        (t,v) = fi.readline().split()
        v_vec=nparray(float(v),dtype=float)
        t_vec=nparray(float(t)*1.e9,dtype=float)
        for line in fi:
            (t,v) = line.split()
            v_vec = row_stack((v_vec,float(v)))
            t_vec = row_stack((t_vec,float(t)*1.e9))
        fi.close()
        #Get the mean time-step for sake of frequencies
        dt = mean(diff(t_vec,n=1,axis=0))
        #FFT the vector
        v_vec_ft = FFT(v_vec,axis=0)
        f = FREQ(v_vec_ft.shape[0],dt)
        m_extend = 10
        f_extend = FREQ(v_vec_ft.shape[0]*m_extend,dt)
        t_extend = arange(0,((t_vec[-1]-t_vec[0])+dt)*m_extend,dt)
        # deep copy for the noise estimation
        n_vec_ft = npcopy(v_vec_ft)
        # find indices where there is only noise in the power, and indices with predominantly signal
        # replace the signal elements in the noise vector with a random sampling from the noise portion
        chooseinds = nparray([i for i,nu in enumerate(f) if (npabs(nu)> 6.5 and npabs(nu)<(20))])
        replaceinds = nparray([i for i,nu in enumerate(f) if npabs(nu)< 6.5])
        values = choice(n_vec_ft[chooseinds,0],len(replaceinds))
        n_vec_ft[replaceinds,0] = values

        ## build noise vector and add to n_collection_ft
        # sort inds for f and use for interp to extend noise in fourier domain
        inds = argsort(f)
        n_vec_extend_ft_r = interp(f_extend,f[inds],npabs(n_vec_ft[inds,0]))
        n_vec_extend_ft_phi = choice(npangle(n_vec_ft[:,0]),f_extend.shape[0])
        n_vec_extend_ft = nprect(n_vec_extend_ft_r,n_vec_extend_ft_phi)
        n_vec_extend_ft.shape = (n_vec_extend_ft.shape[0],1)
        
        if n_collection_ft.shape[0] < n_vec_extend_ft.shape[0]:
            n_collection_ft = npcopy(n_vec_extend_ft)
           # s_collection_ft.shape = (s_collection_ft.shape[0],1)
        else:
            n_collection_ft = column_stack((n_collection_ft,n_vec_extend_ft))

        ## build signal vector and add to s_collection_ft
        noiseamp = nppower(mean(npabs(values)),int(2))
        # low-frequency signal amplitude; note: the original averaged the indices i
        # rather than the spectral values, which looks like a bug
        siginds = nparray([i for i,nu in enumerate(f) if npabs(nu) < 1.0])
        sigamp = nppower(mean(npabs(v_vec_ft[siginds,0])),int(2))
        s_vec_ft = npcopy(v_vec_ft)
        s_vec_ft[:,0] *= Weiner(f,sigamp,noiseamp,cut = 5,p = 4) * fourier_delay(f,-40) ## Wiener filter and dial back by 40 ns

        if samplefiles:
            out = column_stack((f,npabs(v_vec_ft),npabs(n_vec_ft),npabs(s_vec_ft)))
            savetxt(outname_spect,out,fmt='%.4f')

        s_vec = real(IFFT(s_vec_ft,axis=0))
        s_vec_extend = zeros((f_extend.shape[0],1),dtype=float) 
        s_vec_extend[:s_vec.shape[0],0] = s_vec[:,0]
        s_vec_extend_ft = FFT(s_vec_extend,axis=0)

        if s_collection_ft.shape[0] < s_vec_extend_ft.shape[0]:
            s_collection_ft = npcopy(s_vec_extend_ft)
           # s_collection_ft.shape = (s_collection_ft.shape[0],1)
        else:
            s_collection_ft = column_stack((s_collection_ft,s_vec_extend_ft))

        # first sum all the Wiener-filtered and fourier_delay() signals, then add the single noise vector back
    if printfiles:
        outpath = '../data_fs/extern/'
        filename = outpath + 'signal_collection_ft'
        npsave(filename,s_collection_ft)
        filename = outpath + 'noise_collection_ft'
        npsave(filename,n_collection_ft)
        filename = outpath + 'frequencies_collection'
        npsave(filename,f_extend)
        filename = outpath + 'times_collection'
        npsave(filename,t_extend)

    return (s_collection_ft,n_collection_ft,f_extend,t_extend)
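
Weiner() and fourier_delay() are project helpers whose definitions are not shown. Sketches of plausible forms matching how they are called above (a Wiener-style gain S/(S+N) rolled off beyond cut with order p, and a linear phase ramp for a time shift); the real implementations may differ:

import numpy as np

def Weiner(f, sigamp, noiseamp, cut=5, p=4):
    '''Wiener-style gain with a Butterworth-like roll-off; a guess at the real helper.'''
    snr_gain = sigamp / (sigamp + noiseamp)
    rolloff = 1.0 / (1.0 + (np.abs(f) / cut) ** (2 * p))
    return snr_gain * rolloff

def fourier_delay(f, t0):
    '''Linear phase ramp exp(-2j*pi*f*t0): shifts the time-domain signal by t0.'''
    return np.exp(-2j * np.pi * f * t0)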
Example #10
def calcpkmnresistances(thispkmncsvdata, csvheaders):
    global allresitances
    global abilitiesaffectingresistances
    global alltypesindex

    pkmnrestances = npcopy(allresitances[ alltypesindex[ thispkmncsvdata[csvheaders['Type1']] ]])
    #print(pkmnrestances)
    if thispkmncsvdata[csvheaders['Type2']] != '':
        pkmnrestances2 = npcopy(allresitances[ alltypesindex[ thispkmncsvdata[csvheaders['Type2'] ]]])
        pkmnrestances *= pkmnrestances2
    
    if generation >= 3:
        pkmnabilities = set()

        abilityheaders = ['Ability1','Ability2']
        if generation >= 5:
            abilityheaders += ['HiddenAbility']

        for abileheader in abilityheaders:
            if thispkmncsvdata[csvheaders[abileheader]] != '':
                pkmnabilities.add(thispkmncsvdata[csvheaders[abileheader]])

        possibleabilities = pkmnabilities.intersection(abilitiesaffectingresistances)

        if len(possibleabilities) > 0:
            ability = possibleabilities.pop()

            if len(possibleabilities) >= 1:
                print('assuming ' + str(thispkmncsvdata[csvheaders['Name']]) + ' has ' + str(ability) + ' and not ' + str(possibleabilities))
            else:
                print('assuming ' + str(thispkmncsvdata[csvheaders['Name']]) + ' has ' + str(ability))

            if ability == 'Dry Skin':
                pkmnrestances[ alltypesindex['Fire']] *= 1.25
                pkmnrestances[ alltypesindex['Water']] = 0
            elif ability == 'Thick Fat':
                pkmnrestances[ alltypesindex['Fire']] *= 0.5
                pkmnrestances[ alltypesindex['Ice']] *= 0.5
            elif ability == 'Fluffy':
                pkmnrestances[ alltypesindex['Fire']] *= 2
            elif ability == 'Heatproof' or ability == 'Water Bubble':
                pkmnrestances[ alltypesindex['Fire']] *= 0.5
            elif ability == 'Primordial Sea':
                pkmnrestances[ alltypesindex['Fire']] = 0
            elif ability == 'Desolate Land' or ability == 'Water Absorb':
                pkmnrestances[ alltypesindex['Water']] = 0
            elif ability == 'Volt Absorb' or ability == 'Lightning Rod' or ability == 'Motor Drive':
                pkmnrestances[ alltypesindex['Electric']] = 0
            elif ability == 'Levitate':
                pkmnrestances[ alltypesindex['Ground']] = 0
            elif ability == 'Wonder Guard':
                # zero out everything that is not super effective (multiplier < 2);
                # the original reassigned the loop variable, which never touches the array
                pkmnrestances[pkmnrestances < 2] = 0
            elif ability != 'Truant':
                print('Error: no handler for ' + str(ability))
        else:
            ability = ''
    else:
        ability = ''
    return [ability, list(pkmnrestances)]
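
Dual typing is handled by elementwise multiplication of the two per-type multiplier vectors, after which abilities patch individual entries. The combination step in isolation, using the real Fire/Water/Grass sub-chart:

import numpy as np

# Multipliers against a defender, indexed by attack type [Fire, Water, Grass]:
water_resists = np.array([0.5, 0.5, 2.0])   # pure Water defender
grass_resists = np.array([2.0, 0.5, 0.5])   # pure Grass defender

dual = water_resists * grass_resists        # Water/Grass defender (e.g. Ludicolo)
print(dual)                                 # -> [1.   0.25 1.  ]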
Example #11
def searchParams(params0,paramsFixed,maindir,poscarsDir,vaspinputdir,nKtargets):
    '''Uses gradient search. The vector x is divide(params,params0).  Deal with x here only.
    ''' 
#     xbest = array([1.0, 1.0, 1.0, 1.0, 1.0, 1.0])
    xbest = array([1.0]*len(params0))
    itermax = 100
    gnormTol = 0.001
    minstep = 0.0001
    iIter = 0
    step = 0.1  
    fbest = Nkcost(multiply(xbest,params0),paramsFixed,nKtargets,maindir,poscarsDir,vaspinputdir)
    print('\tDefault parameters cost {:6.2f}'.format(fbest))
    returnList = []
    while returnList is not None:
        if len(returnList) > 0:
            print('Moving to low cost point found in grad routine')
            xbest = returnList[0]
            fbest = returnList[1]
        gr, returnList = grad(max(step, 0.01), xbest, fbest, params0, paramsFixed, nKtargets, maindir, poscarsDir, vaspinputdir)
    gnorm  = norm(gr)
    gnormstart = gnorm
    fstart = fbest
    method = 'steepest'
    print('Initial cost', fbest, 'gnorm', gnorm, xbest)
    atMinStep = False
    while iIter < itermax and gnorm > gnormTol and not atMinStep:
        print(iIter, end=' ')  # progress bar
        lower = False
        while not lower:
            if method == 'steepest':
                xnew = xbest - step*gr
            fnew =  Nkcost(multiply(xnew,params0),paramsFixed,nKtargets,maindir,poscarsDir,vaspinputdir)
            print('\tCost {:6.2f}'.format(fnew), xnew, step)
            if fnew < fbest:
                lower = True
                fbest = fnew
                xbest = npcopy(xnew)
                returnList = []
                while returnList is not None:
                    if len(returnList) > 0:
                        print('Moving to low cost point found in grad routine')
                        xbest = returnList[0]
                        fbest = returnList[1]
                    gr, returnList = grad(max(step, 0.01), npcopy(xbest), fbest, params0, paramsFixed, nKtargets, maindir, poscarsDir, vaspinputdir)
                gnorm  = norm(gr)
                step *= 2
            else:
                step /= 2
                if step < minstep:
                    print('minimum step reached: {}'.format(step))
                    atMinStep = True
                    break
        iIter += 1                   
#     newParams = currparams
    print('For {} parameters and {} steps'.format(len(xbest), iIter))
    print('\tStarting cost', fstart, 'gnorm', gnormstart)
    print('\tEnding cost', fnew, 'gnorm', gnorm, 'step', step)
    if gnorm <= gnormTol:
        print('\nSuccess after {} iterations'.format(iIter))
    elif iIter == itermax:
        print('\nExceeded maximum number of iterations ({}), while gnorm {} is greater than the tolerance {}'.format(itermax, gnorm, gnormTol))
    if fnew >= fstart:
        print('Did not find a lower cost!')
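
The line search doubles the step after every accepted move and halves it after every rejection, down to minstep. A stripped-down sketch of the same steepest-descent loop with the cluster cost calls replaced by an ordinary function:

import numpy as np

def steepest(cost, grad, x, step=0.1, minstep=1e-4, itermax=100, gtol=1e-3):
    f = cost(x)
    for _ in range(itermax):
        if np.linalg.norm(grad(x)) <= gtol:
            break
        xnew = x - step * grad(x)
        if cost(xnew) < f:
            x, f = xnew, cost(xnew)
            step *= 2             # reward success with a bigger step
        else:
            step /= 2             # back off after a failed move
            if step < minstep:
                break
    return x, f

print(steepest(lambda v: (v**2).sum(), lambda v: 2*v, np.array([3.0, -2.0])))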
Example #12
def searchParamsAll(maindir,poscarsDir,vaspinputdir,nKtargets):
    '''Set up a N-dimensional grid of parameters, and run through all of it''' 
    paramLabels = ['power','wallPower','wallfactor','wallClose','wallOffset','dw' ]
    print('Parameters in method')
    print('\t{}'.format(paramLabels))
#     print '\twallPower equals power'
#     print '\tdw held at {}'.format(sys.argv[1])

# #  
#     params0 =     [ 4.0,6.0 ]   #['power','wallPower','wallfactor','wallClose','wallOffset','dw' ]
#     params1 =     [ 2.0,4.0]   #wallPower
#     params2 =     [ 0.1,0.3,1.0] #wallfactor
#     params3 =     [ 0.05] #wallClose
#     params4 =     [ 0] #wallOffset
#     params5 =     [0.5, 1.0] #dw
#  best: 1.57 [ 6.    4.    1.    0.05  0.    0.5 ]] avg nDone 19.0


# 
#     params0 =     [ 6.0]   #['power','wallPower','wallfactor','wallClose','wallOffset','dw' ]
#     params1 =     [ 4.0, 3.0]   #wallPower
#     params2 =     [ 0.2, 0.5, 1.0] #wallfactor
#     params3 =     [ 0.05] #wallClose
#     params4 =     [ 0.0] #wallOffset
#     params5 =     [ 0.5] #dw

    params0 =     [ 5.0 ]   #['power','wallPower','wallfactor','wallClose','wallOffset','dw' ]
    params1 =     [ 2.0 ]   #wallPower
    params2 =     [ 1.3 ] #wallfactor
    params3 =     [ 0.05] #wallClose
    params4 =     [ 0.0 ] #wallOffset
    params5 =     [ 0.5 ] #dw 
    

    '''Si:     1.00 [ 5.    2.    1.3   0.05  0.    0.5 ]] avg nDone 18.0

    *1.16 [ 5.    2.  1.3   0.05   0.   0.5 ]] avg nDone 20.0
                1.316  4    2    1     0.1    0    0.5    20
               1.29 [ 4.    3.  1.    0.05   0.   0.5 ]] avg nDone 20.0
               
               1.269  5     3   0.7   0.05   0    0.5  20
               1.27 [ 6.    2.    1.    0.05  0.    0.5 ]] avg nDone 20.0
               1.21 [ 6.    3.    0.5   0.05  0.    0.5 ]] avg nDone 19.0

      SC   need to test with other semiconductors    
                1.359    20    4    3    1    0.05   0    0.5
                1.411    20    5    2    1    0.05   0    0.5
                1.412    20    5    2    1    0.1    0    0.5
                1.475    20    4    2    1    0.1    0    0.5
                
      metals : 
           
                1.383    17.87    4    2    1.5    0.05    0    0.5
                1.384    17.86    4    2    1.3    0.05    0    0.5
                1.392    17.94    4    2    1.7    0.05    0    0.5
                1.395    17.6    5    2    1.3    0.05    0    0.5
                1.403    17.56    5    2    1.5    0.05    0    0.5
                1.409    17.69    5    3    1.3    0.05    0    0.5

               *1.401    17.66    4    2    1    0.1    0    0.5
                1.402    17.66    5    2    1    0.1    0    0.5
                1.404    17.76    5    2    1    0.05   0    0.5
                1.406    17.54    5    3    1    0.1    0    0.5
                1.408    17.72    5    3    1    0.05   0    0.5
               *1.41     17.91    4    2    1    0.05   0    0.5
                1.417    17.76    4    3    1    0.1    0    0.5
                1.422    17.84    4    3    1    0.05   0    0.5
'''


#     params5 =  [0.1]
#     params5 =  [float(sys.argv[1])]
    nP = len(paramLabels)
    nPsets = len(params0)*len(params1)*len(params2)*len(params3)*len(params4)*len(params5)
    print('Will run {} parameter sets'.format(nPsets))
    all = zeros(nPsets,dtype = [('cost','float'),('params','{}float'.format(nP))]) # note: shadows the builtin all()
    iset = 0
    nRunSlots = min(100,nPsets) # run slots are directories that run a paramSet; should be <= nPsets
    slotsJobIDs = [[] for _ in range(nRunSlots)] # independent lists; [[]]*n would alias one list
    slotsIsets =  zeros(nRunSlots,dtype = int32)
    os.chdir(maindir)
    os.system('rm -r -f r*')
    for i in range(nRunSlots):
        rdir = '{}/r{}'.format(maindir,i)
        os.mkdir(rdir)
    isetsToStart = list(range(nPsets)) # list, so pop(0) works
    isetsDone = []
    for p0 in params0:
        for p1 in params1:
            for p2 in params2:
                for p3 in params3:
                    for p4 in params4:
                        for p5 in params5:
                            params = [p0,p1,p2,p3,p4,p5]                      
                            all[iset]['params'] = params
                            iset += 1
    summary = open('{}/summaryGrid.csv'.format(maindir),'w')
    summary.write('iset,cost,avgDone,power,wallPower,wallfactor,wallClose,wallOffset,dw\n')
    toAnalyze = []
    iwait = 0
    minCost = 100
    iminCost = None
    bestParams = []
    while len(isetsDone) < nPsets:
        if len(isetsToStart) > 0:
            icurrSet = isetsToStart[0]
            for ir in range(nRunSlots):
                if len(slotsJobIDs[ir]) == 0 and ir not in toAnalyze: #use this slot for next set
                    iwait = 0; print()
                    #start new set
                    params = all[icurrSet]['params']
                    jobIDs = submitSet(ir,params,maindir,poscarsDir,vaspinputdir,nKtargets)
                    subprocess.call(['echo', '\tFor set {} in slot {}, submitted {} jobs, ID range {} , {}'.format(icurrSet+1,ir,len(jobIDs),jobIDs[0],jobIDs[-1])])
                    slotsJobIDs[ir] = jobIDs
                    isetsToStart.pop(0)
                    toAnalyze.append(ir)
                    slotsIsets[ir] = icurrSet
                    if len(slotsJobIDs[-1]) > 0: #slots have all started work
                        print('\twait', end=' ')
                    break #submit one set at a time
        if len(toAnalyze) > 0:
            for ir in range(nRunSlots):
                if len(slotsJobIDs[ir]) == 0: 
                    if ir in toAnalyze: #the slot's previous calc has not been analyzed
                        print()
                        rdir = '{}/r{}'.format(maindir,ir)
                        [cost,avgnDone] = analyzeNks.analyze([rdir])
                        ioldSet = slotsIsets[ir]
                        all[ioldSet]['cost'] = cost
                        if cost < minCost: 
                            minCost = cost
                            bestAvgNdone = avgnDone
                            bestParams = all[ioldSet]['params']
                            iminCost =  ioldSet
                            
                            os.system('cp -r {}/loglog.png {}/best_loglog.png'.format(rdir,maindir))
                            os.system('cp -r {}/methodErrs.png {}/best_methodErrs.png'.format(rdir,maindir)) 
                            os.system('cp -r {}/summary.csv {}/best_summary.csv'.format(rdir,maindir))
                            if os.path.exists('{}/bestRun'.format(maindir)):
                                os.system('rm -r -f {}/bestRun'.format(maindir))
                            os.system('cp -r {} {}/bestRun'.format(rdir,maindir))
                        if iminCost is None: sys.exit('No minimum cost run found.  All runs likely failed')                        
                        print('cost for set {}: {:6.2f} {}] avg nDone {}'.format(ioldSet,cost,all[ioldSet]['params'],avgnDone))
                        print('vs. min cost {}: {:6.2f} {}] avg nDone {}'.format(iminCost,minCost,bestParams,bestAvgNdone))
                        ps = all[ioldSet]['params']
                        summary.write('{},{:6.3f},{:6.2f},{},{},{},{},{},{}\n'.format(ioldSet,cost,avgnDone,
                                                    ps[0],ps[1],ps[2],ps[3],ps[4],ps[5]))
                        summary.flush()
                        toAnalyze.remove(ir)
                        isetsDone.append(ioldSet) 
                        break #analyze one set at a time
    
        #update slotsJobIDs
        output = []
        while len(output) == 0: #[0,8]:
            devnull = open(os.devnull, 'w')
            proc = subprocess.Popen(['squeue', '-u', 'bch'], stdout=subprocess.PIPE, stderr=devnull, universal_newlines=True)
            output = proc.communicate()[0].split()
        if len(output)>8:
            runningIDs = []
            for item in output:
                if item.isdigit() and len(item) == 8:
                    runningIDs.append(item)
            for ir in range(nRunSlots):
                for id in npcopy(slotsJobIDs[ir]):
                    if id in runningIDs:
                        continue
                    else:
                        slotsJobIDs[ir].remove(id) 
        elif len(output) == 8:
            slotsJobIDs = [[]]*nRunSlots
        if len(slotsJobIDs[-1]) > 0:
            iwait += 1
            time.sleep(10)
            print(' {}'.format(iwait), end=' ')
    summary.close()
    print('Finished {} sets of parameters'.format(nPsets))
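
Job completion is detected by polling squeue and diffing the reported IDs against each slot's list. That polling step in isolation, a hedged sketch assuming the same output layout (a header row, then rows whose job IDs are 8-digit numbers):

import subprocess

def running_job_ids(user='bch'):
    '''Return the set of job IDs squeue currently reports for this user.'''
    out = subprocess.run(['squeue', '-u', user], capture_output=True, text=True).stdout.split()
    return {tok for tok in out if tok.isdigit() and len(tok) == 8}

def prune_finished(slotsJobIDs):
    '''Drop IDs squeue no longer lists; an emptied slot list means the slot is free.'''
    running = running_job_ids()
    for ids in slotsJobIDs:
        ids[:] = [jid for jid in ids if jid in running]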
Example #13
    def get_relation_probs_for_points(self, points, landmark, landmark_heatmap,
                                      original_landmark_heatmap, perspective):
        def instantiate_relations(landmark):

            bullshit_trajector = Landmark(None, PointRepresentation(Vec2(0,
                                                                         0)),
                                          None)
            relations = []
            if not isinstance(landmark.representation, SurfaceRepresentation):
                for rel in DistanceRelationSet.relations:
                    for dist_class, deg_class in list(
                            product([
                                Measurement.NEAR
                                if rel == ToRelation else Measurement.FAR
                            ], Degree.all)):
                        relation = rel(perspective, landmark,
                                       bullshit_trajector)
                        relation.measurement.best_distance_class = dist_class
                        relation.measurement.best_degree_class = deg_class
                        relations.append(relation)

            for rel in ContainmentRelationSet.relations:
                relation = rel(perspective, landmark, bullshit_trajector)
                relations.append(relation)

            for rel in OrientationRelationSet.relations:
                for dist_class, deg_class in list(
                        product([Measurement.FAR], Degree.all)) + [
                            (Measurement.NONE, Degree.NONE)
                        ]:
                    # for dist_class, deg_class in [(Measurement.NONE,Degree.NONE)]:
                    relation = rel(perspective, landmark, bullshit_trajector)
                    relation.measurement.best_distance_class = dist_class
                    relation.measurement.best_degree_class = deg_class
                    relations.append(relation)
            return relations

        syms = ['\\', '|', '/', '-']

        relations = instantiate_relations(landmark)
        rel_points_probs = []
        original_probs = []
        sum_probs = None
        for i, relation in enumerate(relations):
            probs = self.get_probabilities_points(points, relation, None, None)
            if probs.sum() != 0:
                probs /= probs.sum()
            original_probs.append(npcopy(probs))
            if sum_probs is None: sum_probs = npcopy(probs)
            else: sum_probs += probs
            rel_points_probs.append(probs)
            sys.stdout.write("\b%s" % syms[i % len(syms)])
            sys.stdout.flush()

        # normalize across relations
        for probs in rel_points_probs:
            probs /= sum_probs
            probs *= landmark_heatmap

        # TODO treat ori_relations differently

        return relations, list(zip(rel_points_probs, original_probs))
Example #14
    def get_landmark_probs_for_points(self, landmarks, points, xs, ys, x, y):
        def get_probabilities(landmark):
            epsilon = 0.02
            distances = array([
                landmark.distance_to_point(point)
                if not (isinstance(landmark.representation,
                                   RectangleRepresentation)
                        and landmark.representation.contains_point(point)) else
                9 * epsilon for point in points
            ])
            # distances = array([ landmark.distance_to_point(point)
            #     if not (isinstance(landmark.representation,RectangleRepresentation) and landmark.representation.contains_point(point))
            #     else min(poly_to_vec_distance(landmark.representation.get_geometry().to_polygon(),point),landmark.representation.middle.distance_to(point))
            #     for point in points])
            # scores = 1.0/(distances + epsilon)**0.5
            std = .1
            scores = exp(-(distances / std)**2)
            # if scores.sum() != 0:
            # print landmark, scores.sum(), max(scores)
            return scores / scores.sum()
            # else: return scores

        sum_probs = None
        prob_lists = []
        original_probs = []

        syms = ['\\', '|', '/', '-']

        for i, landmark in enumerate(landmarks):
            probs = get_probabilities(landmark)
            original_probs.append(npcopy(probs))
            # probabilities = probs.reshape( (len(xs),len(ys)) ).T
            # plt.pcolor(x, y, probabilities, cmap = 'jet', edgecolors='none', alpha=0.7)
            # plt.colorbar()
            # plt.title(str(landmark)+" before")
            # plt.show()

            # print landmark, probs.sum(), max(probs)
            if sum_probs is None: sum_probs = npcopy(probs)
            else: sum_probs += probs
            prob_lists.append(probs)
            sys.stdout.write("\b%s" % syms[i % len(syms)])
            sys.stdout.flush()

        for lmk, probs in zip(landmarks, prob_lists):

            # print
            # probabilities = probs.reshape( (len(xs),len(ys)) ).T
            # plt.pcolor(x, y, probabilities, cmap = 'jet', edgecolors='none', alpha=0.7)
            # plt.colorbar()
            # plt.title(str(lmk)+" before")
            # plt.show()

            probs /= sum_probs

            # probabilities = probs.reshape( (len(xs),len(ys)) ).T
            # plt.pcolor(x, y, probabilities, cmap = 'jet', edgecolors='none', alpha=0.7)
            # plt.colorbar()
            # plt.title(str(lmk)+" after")
            # plt.show()

        return prob_lists, original_probs
Example #15
from numpy import asarray, ones, copy as npcopy
from .Regions import Regions

__all__ = ['spm','mph','hpd','mpy','dpy_noleap','dpy_gregorian','dpy_360','dpm_noleap','dpm_gregorian','dpm_360','g_per_Pg','g_per_kg','Ar_molar_mass','C_molar_mass','N_molar_mass','O_molar_mass','CO2_molar_mass','dry_air_molar_mass','dry_air_mass','dry_air_moles','co2_g_per_ppm','co2_ppm_per_kg','co2_ppm_per_C_Pg','regions','NCARclrs','NCARcmap','NCARnorm','region_names','dpy','mid_months','spd','spy']

# Time constants
spm              = 60.     # seconds per minute
mph              = 60.     # minutes per hour
hpd              = 24.     # hours per day
spd              = spm*mph*hpd
spy              = spd*365.
mpy              = 12.     # months per year
dpy_noleap       = 365.0   # days per year (for no leap year calendars)
dpy_gregorian    = 365.25  # days per year
dpy_360          = 360.0   # days per year (for 30 days/month)
dpm_noleap       = asarray([31,28,31,30,31,30,31,31,30,31,30,31],dtype='float') # days per month
dpm_gregorian    = npcopy(dpm_noleap) ; dpm_gregorian[1] = dpm_gregorian[1] + 0.25
dpm_360          = ones(int(mpy))*30.
mid_months       = asarray([15.5,45.,74.5,105.,135.5,166.,196.5,227.5,258.,288.5,319.,349.5],dtype='float')
lbl_months       = ["Jan","Feb","Mar","Apr","May","Jun","Jul","Aug","Sep","Oct","Nov","Dec"]
bnd_months       = asarray([0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334, 365],dtype='int')

dpy = {"noleap"    : dpy_noleap,
       "365_day"   : dpy_noleap,
       "360_day"   : dpy_360,
       "gregorian" : dpy_gregorian,
       "proleptic_gregorian" : dpy_gregorian}

# Mass unit conversions
g_per_Pg         = 1e+15   # grams per Pg
g_per_kg         = 1e+3    # grams per kg
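
A quick consistency check on the calendar constants above (assuming NumPy): the month boundaries are cumulative sums of dpm_noleap, and each mid-month falls halfway through its month:

from numpy import asarray, concatenate, cumsum, allclose

dpm = asarray([31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31], dtype='float')
bnd = concatenate(([0.], cumsum(dpm)))         # -> [0, 31, 59, ..., 334, 365], matching bnd_months
mid = bnd[:-1] + dpm / 2                       # -> [15.5, 45., 74.5, ...], matching mid_months
print(allclose(mid[:3], [15.5, 45.0, 74.5]))   # True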