コード例 #1
0
ファイル: run.py プロジェクト: sigh/Planet-Wars
def run_games(games):
    """Run the given games, reusing cached results where possible.

    Results already stored in the DB are not recomputed; games involving
    the 'Current' bot are always cleared first so they get rerun.  New
    games are played in parallel via forkmap and their outcomes written
    back with REPLACE (overwriting any stale row).

    Returns the full list of results for `games` (from cached_results).
    """
    c = conn.cursor()

    # We want all games for the current bot to be rerun.
    # TODO: Check timestamp to determine when Current has been updated.
    clear_results('Current')

    results = cached_results(games)

    # Only the games without a cached result need to be (re)played.
    # (identity check: cached_results signals "missing" with None)
    new_games = [g for g, r in zip(games, results) if r is None]

    # run the tournament for the new games only
    new_results = forkmap.map(run_game, new_games)

    # Update results in the db.  `game_map` renamed from `map` to avoid
    # shadowing the builtin.
    for ((winner, moves), (args, game_map, players)) in zip(new_results, new_games):
        c.execute(
            "REPLACE INTO results VALUES ( ?, ?, ?, ?, ?, ? )",
            (
                players[0],
                players[1],
                game_map,
                OPTIONS['max_turns'],
                winner,
                moves
            )
        )
    conn.commit()

    return cached_results(games)
コード例 #2
0
			print "WTF, an IndexError ! "
			errorimglist.append(image)

		else:
			print "It worked !"

		psffilepath = os.path.join(imgpsfdir, "s001.fits")
		if os.path.islink(psffilepath):
			os.remove(psffilepath)
		os.symlink(os.path.join(imgpsfdir, "results", "s_1.fits"), psffilepath)
	
	
# Time the parallel PSF-building run over all images.
starttime = datetime.now()
#pool = multiprocessing.Pool(processes=ncorestouse)
#pool.map(buildpsf, images)
# forkmap forks one worker per image, capped at ncorestouse processes.
forkmap.map(buildpsf, images, n = ncorestouse)
endtime = datetime.now()
timetaken = nicetimediff(endtime - starttime)

# Make sure the psfkicklist file exists so the user can edit it.
if os.path.isfile(psfkicklist):
	print "The psfkicklist already exists :"
else:
	cmd = "touch " + psfkicklist
	os.system(cmd)
	print "I have just touched the psfkicklist for you :"
print psfkicklist

# Report images on which pyMCS raised an IndexError (collected in
# errorimglist by the worker function) so the user can triage them.
if len(errorimglist) != 0:
	print "pyMCS raised an IndexError on the following images :"
	print "(Add them to the psfkicklist, retry them with a testlist, ...)"
	print "\n".join(["%s\t%s" % (image['imgname'], "pyMCS IndexError") for image in errorimglist])
コード例 #3
0
ファイル: EvaluationFiber.py プロジェクト: mayzel/Tensor
def CrossValidation(
        trainingData,
        estimator,
        lossFunction,
        evaluatingDataSeparator,
        hyperParameters,
        parameterOptimizingDataSeparator):
    """Simplified cross-validation with optional hyper-parameter search.

    Generator: yields one {"error": score, "param": bestParameter} dict
    per outer split produced by evaluatingDataSeparator.
    """
    #Simplified CrossValidation

    global log

    import forkmap 

    @forkmap.parallelizable(6)
    def evaluateData(evalAndTrainIx,hyperParameters):
        # Evaluate one (evaluation, training) index split.
        (evalIx, trainIx) = evalAndTrainIx
        
        bestScore = float("inf")
        # Accept a single hyper-parameter as well as a list of candidates.
        if not isinstance(hyperParameters,list):
            hyperParameters = [hyperParameters]

        doesOptimizeHyperParameter = len(hyperParameters) > 1

        alldata = arange(len(trainingData))

        # Evaluation and training indices must be disjoint.
        assert(len(set(evalIx)&set(trainIx)) == 0)
        if(doesOptimizeHyperParameter):
            # Hyper-parameter optimization loop.
            for hyperParameter in hyperParameters:

                rawList = parameterOptimizingDataSeparator(trainIx)
                for (evalIxHP, trainIxHP) in rawList:

                    # Inner splits must not leak the outer evaluation set.
                    assert(len(set(evalIxHP)&set(evalIx)) == 0)
                    assert(len(set(trainIxHP) & set(evalIx)) == 0)

                    #print "Run LossFunction. hyperParam:", hyperParameter, 
                    estimation = estimator(hyperParameter,trainIxHP)
                    score = lossFunction(estimation,evalIxHP)
                    break #Simplified: only the first inner split is used.

                print "error:",score," at param=",hyperParameter
                if bestScore > score:
                    bestScore = score
                    bestParameter = hyperParameter
        else:
            bestParameter = hyperParameters[0]

        # Re-train on the full outer training set with the best parameter.
        if doesOptimizeHyperParameter:
            print "Best parameter:",bestParameter
        #log.Write(", param, " + str(bestParameter))
        estimation = estimator(bestParameter,trainIx)

        # NOTE(review): the final loss is computed over `alldata`, not
        # over evalIx -- confirm this is intended.
        score = lossFunction(estimation,alldata)
        print "Evaluation:",score
        return {"error":score,"param":bestParameter}
        #scores.append(score)
    
    from ThreadFunc import tmap
    # Evaluation loop: one pass per outer split.
    #datastream = Toolbox.ToArray(evaluatingDataSeparator(arange(len(trainingData))))
    #datastream = Toolbox.ToArray(datastream)
    #datastream = Toolbox.ToArray(evaluatingDataSeparator(arange(len(trainingData))))
    datastream = evaluatingDataSeparator(arange(len(trainingData)))
    #datastream = Toolbox.ToArray(datastream)
    try:
        # NOTE(review): this assert always fails on the first next(), so
        # the forked path below is dead code and the except branch
        # (sequential evaluation) always runs.
        assert(False)
        print "Multi-Threading"
        def forkeval(data):
            return evaluateData(data,hyperParameters)
        result  = forkmap.map(forkeval,datastream)
        for r in result:
            yield r
    except Exception, e:
        # Fallback: evaluate splits one at a time.
        print "Single Threading"
        for evalAndTrainIx in datastream:
            yield evaluateData(evalAndTrainIx,hyperParameters)
コード例 #4
0
        #print pssl, gain, readnoise, sigclip, sigfrac, objlim

        c.run(maxiter=3)

        ncosmics = np.sum(c.mask)
        #print ncosmics
        #if ncosmics != 0:
        #	print "--- %i pixels ---" % ncosmics

        # We write the mask :
        cosmics.tofits(starmaskfilename,
                       c.getdilatedmask(size=5),
                       verbose=False)

        # And the labels (for later png display) :
        cosmicslist = c.labelmask()
        writepickle(cosmicslist, starcosmicspklfilename, verbose=False)

        # We modify the sigma image, but keep a copy of the original :
        os.rename(sigfilename, origsigfilename)
        (sigarray, sigheader) = cosmics.fromfits(origsigfilename,
                                                 verbose=False)
        sigarray[c.getdilatedmask(size=5)] = 1.0e8
        cosmics.tofits(sigfilename, sigarray, sigheader, verbose=False)


# Mask cosmics on every image in parallel (up to ncorestouse forked workers).
forkmap.map(findcosmics, images, n=ncorestouse)

notify(computer, withsound, "Cosmics masked for psfname %s." % (psfname))
コード例 #5
0
def Evaluate(trainingData, estimator, lossFunction, evaluatingDataSeparator,
             hyperParameters, parameterOptimizingDataSeparator):
    """
    @param trainingData generator of the training data
    @param estimator function that takes a hyper-parameter and training
           data, trains, and returns a model
    @param lossFunction loss function; takes a model and evaluation data
           and returns the error
    @param evaluatingDataSeparator function that splits off part of the
           training data as evaluation data
    @param hyperParameters generator giving the set of hyper-parameters
           to optimize over
    @param parameterOptimizingDataSeparator function that splits the
           training data for hyper-parameter optimization

    Performs cross-validation while optimizing the hyper-parameters.
    Generator: yields one {"error": ..., "param": ...} dict per split.
    """

    #Simplified CrossValidation

    global log

    import forkmap

    @forkmap.parallelizable(6)
    def evaluateData(evalAndTrainIx, hyperParameters):
        # Evaluate one (evaluation, training) index split.
        (evalIx, trainIx) = evalAndTrainIx

        bestScore = float("inf")
        # Accept a single hyper-parameter as well as a list of candidates.
        if not isinstance(hyperParameters, list):
            hyperParameters = [hyperParameters]

        doesOptimizeHyperParameter = len(hyperParameters) > 1

        alldata = arange(len(trainingData))

        # Evaluation and training indices must be disjoint.
        assert (len(set(evalIx) & set(trainIx)) == 0)
        if (doesOptimizeHyperParameter):
            # Hyper-parameter optimization loop.
            for hyperParameter in hyperParameters:

                rawList = parameterOptimizingDataSeparator(trainIx)
                for (evalIxHP, trainIxHP) in rawList:

                    # Inner splits must not leak the outer evaluation set.
                    assert (len(set(evalIxHP) & set(evalIx)) == 0)
                    assert (len(set(trainIxHP) & set(evalIx)) == 0)

                    #print "Run LossFunction. hyperParam:", hyperParameter,
                    estimation = estimator(hyperParameter, trainIxHP)
                    score = lossFunction(estimation, evalIxHP)
                    break  #Simplified: only the first inner split is used.

                print "error:", score, " at param=", hyperParameter
                if bestScore > score:
                    bestScore = score
                    bestParameter = hyperParameter
        else:
            bestParameter = hyperParameters[0]

        # Re-train on the full outer training set with the best parameter.
        if doesOptimizeHyperParameter:
            print "Best parameter:", bestParameter
        #log.Write(", param, " + str(bestParameter))
        estimation = estimator(bestParameter, trainIx)

        # NOTE(review): the final loss is computed over `alldata`, not
        # over evalIx -- confirm this is intended.
        score = lossFunction(estimation, alldata)
        print "-EVALUATION-:", score
        return {"error": score, "param": bestParameter}
        #scores.append(score)

    from ThreadFunc import tmap
    # Evaluation loop: one pass per outer split.
    #datastream = Toolbox.ToArray(evaluatingDataSeparator(arange(len(trainingData))))
    #datastream = Toolbox.ToArray(datastream)
    #datastream = Toolbox.ToArray(evaluatingDataSeparator(arange(len(trainingData))))
    datastream = evaluatingDataSeparator(arange(len(trainingData)))
    #datastream = Toolbox.ToArray(datastream)
    try:
        #assert(False)
        print "Multi-Threading"

        def forkeval(data):
            return evaluateData(data, hyperParameters)

        result = forkmap.map(forkeval, datastream)
        for r in result:
            yield r
    except Exception, e:
        # Fallback: evaluate splits sequentially if forking fails.
        print "Single Threading"
        for evalAndTrainIx in datastream:
            yield evaluateData(evalAndTrainIx, hyperParameters)
コード例 #6
0
    img["execi"] = (
        i + 1
    )  # We do not write this into the db, it's just for this particular run.


def extractpsf(image):

    imgpsfdir = os.path.join(psfdir, image['imgname'])
    print "Image %i : %s" % (image["execi"], imgpsfdir)

    os.chdir(imgpsfdir)
    mcs = MCS_interface("pyMCS_psf_config.py")
    mcs.set_up_workspace(extract=True, clear=False, backup=False)


# Extract the PSF of every image in parallel (up to ncorestouse workers).
forkmap.map(extractpsf, images, n=ncorestouse)

notify(computer, withsound, "PSF extraction done for psfname %s." % (psfname))

# Now we help the user with the mask creation.
if refimgname in [img["imgname"] for img in images]:

    imgpsfdir = os.path.join(psfdir, refimgname)
    # Star stamps extracted from the reference image, sorted by filename.
    starfiles = sorted(
        glob.glob(os.path.join(imgpsfdir, "results", "star_*.fits")))

    print "The stars extracted from the reference image are available here :"
    print "\n".join(starfiles)
    print "You can now open these files with DS9 to build your mask (optional)."
    print "Don't mask cosmics, only companion stars !"
    print "Save your region files respectively here :"
コード例 #7
0
ファイル: CrossValidation.py プロジェクト: mayzel/Tensor
def Evaluate(
        trainingData,
        estimator,
        lossFunction,
        evaluatingDataSeparator,
        hyperParameters,
        parameterOptimizingDataSeparator):
    """
    @param trainingData generator of the training data
    @param estimator function that takes a hyper-parameter and training
           data, trains, and returns a model
    @param lossFunction loss function; takes a model and evaluation data
           and returns the error
    @param evaluatingDataSeparator function that splits off part of the
           training data as evaluation data
    @param hyperParameters generator giving the set of hyper-parameters
           to optimize over
    @param parameterOptimizingDataSeparator function that splits the
           training data for hyper-parameter optimization

    Performs cross-validation while optimizing the hyper-parameters.
    Generator: yields one {"error": ..., "param": ...} dict per split.
    """

    #Simplified CrossValidation

    global log

    import forkmap 

    @forkmap.parallelizable(6)
    def evaluateData(evalAndTrainIx,hyperParameters):
        # Evaluate one (evaluation, training) index split.
        (evalIx, trainIx) = evalAndTrainIx
        
        bestScore = float("inf")
        # Accept a single hyper-parameter as well as a list of candidates.
        if not isinstance(hyperParameters,list):
            hyperParameters = [hyperParameters]

        doesOptimizeHyperParameter = len(hyperParameters) > 1

        alldata = arange(len(trainingData))

        # Evaluation and training indices must be disjoint.
        assert(len(set(evalIx)&set(trainIx)) == 0)
        if(doesOptimizeHyperParameter):
            # Hyper-parameter optimization loop.
            for hyperParameter in hyperParameters:

                rawList = parameterOptimizingDataSeparator(trainIx)
                for (evalIxHP, trainIxHP) in rawList:

                    # Inner splits must not leak the outer evaluation set.
                    assert(len(set(evalIxHP)&set(evalIx)) == 0)
                    assert(len(set(trainIxHP) & set(evalIx)) == 0)

                    #print "Run LossFunction. hyperParam:", hyperParameter, 
                    estimation = estimator(hyperParameter,trainIxHP)
                    score = lossFunction(estimation,evalIxHP)
                    break #Simplified: only the first inner split is used.

                print "error:",score," at param=",hyperParameter
                if bestScore > score:
                    bestScore = score
                    bestParameter = hyperParameter
        else:
            bestParameter = hyperParameters[0]

        # Re-train on the full outer training set with the best parameter.
        if doesOptimizeHyperParameter:
            print "Best parameter:",bestParameter
        #log.Write(", param, " + str(bestParameter))
        estimation = estimator(bestParameter,trainIx)

        # NOTE(review): the final loss is computed over `alldata`, not
        # over evalIx -- confirm this is intended.
        score = lossFunction(estimation,alldata)
        print "-EVALUATION-:",score
        return {"error":score,"param":bestParameter}
        #scores.append(score)
    
    from ThreadFunc import tmap
    # Evaluation loop: one pass per outer split.
    #datastream = Toolbox.ToArray(evaluatingDataSeparator(arange(len(trainingData))))
    #datastream = Toolbox.ToArray(datastream)
    #datastream = Toolbox.ToArray(evaluatingDataSeparator(arange(len(trainingData))))
    datastream = evaluatingDataSeparator(arange(len(trainingData)))
    #datastream = Toolbox.ToArray(datastream)
    try:
        #assert(False)
        print "Multi-Threading"
        def forkeval(data):
            return evaluateData(data,hyperParameters)
        result  = forkmap.map(forkeval,datastream)
        for r in result:
            yield r
    except Exception, e:
        # Fallback: evaluate splits sequentially if forking fails.
        print "Single Threading"
        for evalAndTrainIx in datastream:
            yield evaluateData(evalAndTrainIx,hyperParameters)
コード例 #8
0
                                       xmin=1,
                                       xmax=dimx,
                                       ymin=1,
                                       ymax=dimy,
                                       Stdout=1)

    if os.path.isfile(databasename):
        os.remove(databasename)

    print "%i gregister done" % (image["execi"])

    return retdict


# Time the parallel alignment of all images.
starttime = datetime.now()
retdicts = forkmap.map(aliimage, images, n=ncorestouse)
#pool = multiprocessing.Pool(processes=ncorestouse)
#pool.map(aliimage, images)

endtime = datetime.now()
timetaken = nicetimediff(endtime - starttime)

notify(computer, withsound,
       "Dear user, I'm done with the alignment. I did it in %s." % timetaken)
print("Now updating the database...")

# Back up the image database before modifying it.
backupfile(imgdb, dbbudir, "alignimages")

# Make sure the geomap result fields exist in the database schema.
if "geomapangle" not in db.getFieldNames(imgdb):
    db.addFields(imgdb,
                 ['geomapangle:float', 'geomaprms:float', 'geomapscale:float'])
コード例 #9
0
ファイル: EvaluationFiber.py プロジェクト: mayzel/Tensor
def CrossValidation(trainingData, estimator, lossFunction,
                    evaluatingDataSeparator, hyperParameters,
                    parameterOptimizingDataSeparator):
    """Simplified cross-validation with optional hyper-parameter search.

    Generator: yields one {"error": score, "param": bestParameter} dict
    per outer split produced by evaluatingDataSeparator.
    """
    #Simplified CrossValidation

    global log

    import forkmap

    @forkmap.parallelizable(6)
    def evaluateData(evalAndTrainIx, hyperParameters):
        # Evaluate one (evaluation, training) index split.
        (evalIx, trainIx) = evalAndTrainIx

        bestScore = float("inf")
        # Accept a single hyper-parameter as well as a list of candidates.
        if not isinstance(hyperParameters, list):
            hyperParameters = [hyperParameters]

        doesOptimizeHyperParameter = len(hyperParameters) > 1

        alldata = arange(len(trainingData))

        # Evaluation and training indices must be disjoint.
        assert (len(set(evalIx) & set(trainIx)) == 0)
        if (doesOptimizeHyperParameter):
            # Hyper-parameter optimization loop.
            for hyperParameter in hyperParameters:

                rawList = parameterOptimizingDataSeparator(trainIx)
                for (evalIxHP, trainIxHP) in rawList:

                    # Inner splits must not leak the outer evaluation set.
                    assert (len(set(evalIxHP) & set(evalIx)) == 0)
                    assert (len(set(trainIxHP) & set(evalIx)) == 0)

                    #print "Run LossFunction. hyperParam:", hyperParameter,
                    estimation = estimator(hyperParameter, trainIxHP)
                    score = lossFunction(estimation, evalIxHP)
                    break  #Simplified: only the first inner split is used.

                print "error:", score, " at param=", hyperParameter
                if bestScore > score:
                    bestScore = score
                    bestParameter = hyperParameter
        else:
            bestParameter = hyperParameters[0]

        # Re-train on the full outer training set with the best parameter.
        if doesOptimizeHyperParameter:
            print "Best parameter:", bestParameter
        #log.Write(", param, " + str(bestParameter))
        estimation = estimator(bestParameter, trainIx)

        # NOTE(review): the final loss is computed over `alldata`, not
        # over evalIx -- confirm this is intended.
        score = lossFunction(estimation, alldata)
        print "Evaluation:", score
        return {"error": score, "param": bestParameter}
        #scores.append(score)

    from ThreadFunc import tmap
    # Evaluation loop: one pass per outer split.
    #datastream = Toolbox.ToArray(evaluatingDataSeparator(arange(len(trainingData))))
    #datastream = Toolbox.ToArray(datastream)
    #datastream = Toolbox.ToArray(evaluatingDataSeparator(arange(len(trainingData))))
    datastream = evaluatingDataSeparator(arange(len(trainingData)))
    #datastream = Toolbox.ToArray(datastream)
    try:
        # NOTE(review): this assert always fails on the first next(), so
        # the forked path below is dead code and the except branch
        # (sequential evaluation) always runs.
        assert (False)
        print "Multi-Threading"

        def forkeval(data):
            return evaluateData(data, hyperParameters)

        result = forkmap.map(forkeval, datastream)
        for r in result:
            yield r
    except Exception, e:
        # Fallback: evaluate splits one at a time.
        print "Single Threading"
        for evalAndTrainIx in datastream:
            yield evaluateData(evalAndTrainIx, hyperParameters)
コード例 #10
0
                    z_intmin[d] = s.depth

        #for d, minz in z_intmin.items():
        azim = azim/180*num.pi
        azims = [azim]*len(z_intmin.keys())
        d = z_intmin.keys()
        vals = z_intmin.values()
        DS.extend(d)
        AZIMS.extend(azims)
        VALS.extend(vals)
    

    #VALS = 5.-num.array(VALS)/1000.
    #VALS = correct_depth-num.array(VALS)/1000.
    VALS = num.array(VALS)/1000.
    #DS = [d-ref_source.depth for d in DS]
    AZIMS = num.array(AZIMS)
    DS = num.array(DS)/1000.
    fn = 'polar_%s_%s_%s_%s.txt'%(test_type, st, di, ra) 
    num.savetxt(fn, num.array((AZIMS, DS, VALS)))

# Build the list of (strike, dip, rake) samples and run `doit` over all of
# them in parallel, one forked worker per sample.
sdrs = []
num_s = 6
# BUG FIX: numpy.linspace has no `include_last` keyword, so the original
# call raised TypeError.  The stop value 360 - 360/num_s (= 300) already
# excludes 360 (which aliases azimuth 0), so the default endpoint=True
# gives the intended evenly spaced azimuths 0, 60, ..., 300.
# (Assumes `num` is numpy -- confirm against the file's imports.)
for s in num.linspace(0., 360 - 360 / num_s, num_s):
    for d in num.linspace(5, 85, 5):
        for r in num.linspace(0, 180, 5):
            sdrs.append((s, d, r))


forkmap.map(doit, sdrs, n=len(sdrs))
コード例 #11
0
ファイル: ptest.py プロジェクト: mayzel/Tensor
import forkmap



@forkmap.parallelizable(4)
def mmm(n):
    while True:
        a=0
        print n
        for i in xrange(1000000):
            a=i*i



# Fan mmm out over 4 forked workers (never returns: each worker loops forever).
forkmap.map(mmm,xrange(4))